Dataset schema (column name, type, and observed range of values):

| Column                | Type                                                  | Range of values        |
|-----------------------|-------------------------------------------------------|------------------------|
| code                  | string                                                | 46 to 37.2k characters |
| language              | string (categorical, 9 values)                        | --                     |
| AST_depth             | int64                                                 | 3 to 30                |
| alphanumeric_fraction | float64                                               | 0.2 to 0.91            |
| max_line_length       | int64                                                 | 13 to 399              |
| avg_line_length       | float64                                               | 5.67 to 140            |
| num_lines             | int64                                                 | 7 to 299               |
| original_docstring    | string                                                | 22 to 42.6k characters |
| source                | string (categorical, 2 values: "function", "class")   | --                     |
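The numeric columns above are simple surface statistics of each row's `code` field. As a point of reference, here is a minimal sketch of how such statistics could be recomputed; the exact definitions used by the dataset are not stated here, and `AST_depth` is omitted because it requires a language-specific parser, so treat the function below (and its name, `code_stats`) as an illustrative assumption rather than the dataset's actual pipeline.

```python
# Illustrative sketch only: recomputes simple per-row statistics from a "code"
# string. Assumes rows are available as plain dicts (for example, loaded from a
# local JSON/JSONL export of the dataset).

def code_stats(code: str) -> dict:
    lines = code.splitlines() or [""]          # treat empty input as a single empty line
    line_lengths = [len(line) for line in lines]
    alnum = sum(ch.isalnum() for ch in code)   # count alphanumeric characters
    return {
        "alphanumeric_fraction": alnum / len(code) if code else 0.0,
        "max_line_length": max(line_lengths),
        "avg_line_length": sum(line_lengths) / len(line_lengths),
        "num_lines": len(lines),
    }

if __name__ == "__main__":
    sample = "def add(a, b):\n    return a + b\n"
    # {'alphanumeric_fraction': 0.5, 'max_line_length': 16, 'avg_line_length': 15.0, 'num_lines': 2}
    print(code_stats(sample))
```

Whether the dataset counts trailing newlines or whitespace exactly this way is not specified, so small numeric differences from the listed column ranges are to be expected.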
public static synchronized void createLocator() { if (TestConfig.tab().booleanAt(Prms.manageLocatorAgents)) { String s = "Locators are being managed by the hydra master controller. " + "Set " + BasePrms.nameForKey(Prms.manageLocatorAgents) + "=false to use hydra-client-managed locators."; throw new HydraConfigException(s); } Locator locator = getLocator(); Endpoint endpoint = findEndpoint(); if (locator == null) { if (endpoint == null) { generateEndpoint(); } } else if (endpoint == null) { String s = "Locator was already started without DistributedSystemHelper" + " using an unknown port"; throw new HydraRuntimeException(s); } }
java
12
0.627477
79
38.894737
19
/** * Creates a locator endpoint that this VM and other locator VMs in its * distributed system can use when configuring their locator lists. The * endpoint need only be created once. * <p> * Upon initial creation, selects a random port and registers the locator * {@link Endpoint} in the {@link DistributedSystemBlackboard} map. The * locator will always use this port during startup. * <p> * All locators associated with a given distributed system must be created * before locators are started, so that the locator list is complete. * At least one locator must be started before non-locators can connect. * <p> * To start the locator, use {@link #startLocatorAndDS} or {@link * #startLocatorAndAdminDS}. * * @throws HydraRuntimeException if the VM already has a locator that did * not create its endpoint using this method or if master is managing * locators. */
function
internal class CommandExecutionData { private readonly IConsole console; public CommandExecutionData( string? repositoryPath, string? from, string to, bool verbose, IEnumerable<string>? assumeChanges, IConsole console) { this.RepositoryPath = repositoryPath ?? Environment.CurrentDirectory; this.To = to; this.From = from; this.Verbose = verbose; this.AssumeChanges = assumeChanges ?? Enumerable.Empty<string>(); this.console = console; } public string RepositoryPath { get; } public string? From { get; } public string To { get; } public bool Verbose { get; } public IEnumerable<string> AssumeChanges { get; } public CommandExecutionContext BuildExecutionContext() { return new CommandExecutionContext(this, this.console); } }
c#
12
0.58104
81
34.071429
28
/// <summary> /// This class is resolved by <see cref="System.CommandLine"/> /// automatically. It "injects" the value of global options /// and also the ones of the current command being executed. /// </summary>
class
public static StringBuilder textifyStacktrace(String prefix, StackTraceElement[] stes, int numberOfFramesToSkip) { StringBuilder buf = new StringBuilder(); for (int i = numberOfFramesToSkip; i < stes.length; i++) { buf.append(prefix); buf.append(stes[i].toString()); buf.append("\n"); } return buf; }
java
10
0.604839
114
40.444444
9
/** * Given a stack trace, generally generated with * * StackTraceElement[] stes = Thread.currentThread().getStackTrace(); * * writes it out from the topmost stack frame down to the deepest one, as is the Java custom, skipping the topmost * "numberOfFramesToSkip" frames because the caller deems them to be of no interest. * * The prefix is prepended to each line. */
function
static void mq_5b_check(fmd_hdl_t *hdl, cmd_dimm_t *dimm) { nvlist_t *flt; fmd_case_t *cp; cmd_mq_t *ip, *next; int cw; for (cw = 0; cw < CMD_MAX_CKWDS; cw++) { for (ip = cmd_list_next(&dimm->mq_root[cw]); ip != NULL; ip = next) { next = cmd_list_next(ip); if (ip->mq_dupce_count >= cmd.cmd_dupce) { cp = fmd_case_open(hdl, NULL); flt = cmd_dimm_create_fault(hdl, dimm, "fault.memory.dimm-page-retires-excessive", CMD_FLTMAXCONF); dimm->dimm_flags |= CMD_MEM_F_FAULTING; cmd_dimm_dirty(hdl, dimm); fmd_case_add_suspect(hdl, cp, flt); fmd_case_solve(hdl, cp); fmd_hdl_debug(hdl, "mq_5b_check succeeded: duplicate CE=%d", ip->mq_dupce_count); return; } } } }
c
14
0.579866
51
25.642857
28
/* * rule 5b checking. The check succeeds if * more than 120 non-intermittent CEs are reported against one symbol * position of one afar in 72 hours. */
function
static BOOLEAN CheckSave(TFile *pFile, void *_pContext) { TCheckSaveContext *pContext; int nFileNumber; dispc_t *disp; cmdc_t cmdc; ASSERT(VALID_PFILE(pFile)); pContext = _pContext; disp = wrkspace_get_disp(pContext->wrkspace); if (pFile->bChanged) { if (!pContext->bCheck) goto _saveafile; switch (ConsoleMessageProc(disp, NULL, MSG_WARNING | MSG_YESNOCANCEL, pFile->sFileName, sAskSaveName)) { case 0: _saveafile: nFileNumber = SearchFileList(pFilesInMemoryList, pFile->sFileName, 0); ASSERT(nFileNumber >= 0); SetTopFileByLoadNumber(pFilesInMemoryList, nFileNumber); CMDC_SET(cmdc, pContext->wrkspace, pFile); CmdFileSave(&cmdc); break; case 1: return TRUE; case 2: pContext->bCanceled = TRUE; return FALSE; default: ASSERT(0); } } return TRUE; }
c
14
0.624588
78
25.057143
35
/* ************************************************************************ Function: CheckSave Description: A callback function invoked to check whether the file has been changed and to ask the user whether to save the file to disk. */
function
struct backlight_device *backlight_device_register(const char *name, void *devdata, struct backlight_properties *bp) { int i, rc; struct backlight_device *new_bd; pr_debug("backlight_device_alloc: name=%s\n", name); new_bd = kmalloc(sizeof(struct backlight_device), GFP_KERNEL); if (unlikely(!new_bd)) return ERR_PTR(-ENOMEM); init_MUTEX(&new_bd->sem); new_bd->props = bp; memset(&new_bd->class_dev, 0, sizeof(new_bd->class_dev)); new_bd->class_dev.class = &backlight_class; strlcpy(new_bd->class_dev.class_id, name, KOBJ_NAME_LEN); class_set_devdata(&new_bd->class_dev, devdata); rc = class_device_register(&new_bd->class_dev); if (unlikely(rc)) { error: kfree(new_bd); return ERR_PTR(rc); } memset(&new_bd->fb_notif, 0, sizeof(new_bd->fb_notif)); new_bd->fb_notif.notifier_call = fb_notifier_callback; rc = fb_register_client(&new_bd->fb_notif); if (unlikely(rc)) goto error; for (i = 0; i < ARRAY_SIZE(bl_class_device_attributes); i++) { rc = class_device_create_file(&new_bd->class_dev, &bl_class_device_attributes[i]); if (unlikely(rc)) { while (--i >= 0) class_device_remove_file(&new_bd->class_dev, &bl_class_device_attributes[i]); class_device_unregister(&new_bd->class_dev); return ERR_PTR(rc); } } return new_bd; }
c
13
0.663587
83
33.210526
38
/** * backlight_device_register - create and register a new object of * backlight_device class. * @name: the name of the new object(must be the same as the name of the * respective framebuffer device). * @devdata: an optional pointer to be stored in the class_device. The * methods may retrieve it by using class_get_devdata(&bd->class_dev). * @bp: the backlight properties structure. * * Creates and registers new backlight class_device. Returns either an * ERR_PTR() or a pointer to the newly allocated device. */
function
static size_t visitUDivOperand(Value *Op0, Value *Op1, const BinaryOperator &I, SmallVectorImpl<UDivFoldAction> &Actions, unsigned Depth = 0) { if (match(Op1, m_Power2())) { Actions.push_back(UDivFoldAction(foldUDivPow2Cst, Op1)); return Actions.size(); } if (ConstantInt *C = dyn_cast<ConstantInt>(Op1)) if (C->getValue().isNegative()) { Actions.push_back(UDivFoldAction(foldUDivNegCst, C)); return Actions.size(); } if (match(Op1, m_Shl(m_Power2(), m_Value())) || match(Op1, m_ZExt(m_Shl(m_Power2(), m_Value())))) { Actions.push_back(UDivFoldAction(foldUDivShl, Op1)); return Actions.size(); } if (Depth++ == MaxDepth) return 0; if (SelectInst *SI = dyn_cast<SelectInst>(Op1)) if (size_t LHSIdx = visitUDivOperand(Op0, SI->getOperand(1), I, Actions, Depth)) if (visitUDivOperand(Op0, SI->getOperand(2), I, Actions, Depth)) { Actions.push_back(UDivFoldAction(nullptr, Op1, LHSIdx - 1)); return Actions.size(); } return 0; }
c++
14
0.60348
79
38.035714
28
// \brief Recursively visits the possible right hand operands of a udiv // instruction, seeing through select instructions, to determine if we can // replace the udiv with something simpler. If we find that an operand is not // able to simplify the udiv, we abort the entire transformation.
function
class RoleAssignor: """ Handles commands for adding or removing a role, adding or removing a channel, or adding a user to a configured role @TODO: allow owners to add roles/channels simply by passing in the name of the role or channel instead of requiring them to use the snowflake ID; if there happens to be more than one of either, then require the owner to use the specific ID """ def __init__(self, bot): self.bot = bot @commands.command(pass_context=True, no_pm=False, name='guild') @commands.cooldown(rate=1, per=1, type=commands.BucketType.user) @asyncio.coroutine def assign_role(self, ctx, *, guild: str, member: discord.Member = None): """ Assign users to configured roles if requested. Command is executed via the `guild` command. Examples: > guild Test {user.mention}: You've been successfully added to {guild_name}. > guild Test {user.mention}: You've been removed from {guild_name}. :param ctx: discord.py Context :param guild: the requested group name, uses consume rest behavior :param member: optional discord.Member object :return: Nothing """ server_id = ctx.message.server.id if member is None: member = ctx.message.author plugin_enabled = ConfigLoader().load_server_boolean_setting( server_id, 'RoleAssignment', 'enabled' ) if member is not None and plugin_enabled: requested_guild = discord.utils.get(ctx.message.server.roles, name=guild) if requested_guild is None: # We ran into an issue where a role name was using acute accents # This is the attempt to fix that if requested_guild is none # If still none after this we'll need to get examples to fix it guild = guild.replace("'", "’") requested_guild = discord.utils.get(ctx.message.server.roles, name=guild) role_list = ConfigLoader().load_server_string_setting( server_id, 'RoleAssignment', 'role_list' ) assignment_channel_list = ConfigLoader().load_server_string_setting( server_id, 'RoleAssignment', 'assignment_channel_id' ) if role_list == 'NOT_SET' or assignment_channel_list == 'NOT_SET': yield from self.bot.say("This plugin is not configured for this server.") return channel_list = [] for channel in map(int, assignment_channel_list.split()): channel_list.append(channel) if requested_guild is not None: role_list_split = [] for role in map(int, role_list.split()): role_list_split.append(role) if int(ctx.message.channel.id) in channel_list and \ int(requested_guild.id) in role_list_split: for role in ctx.message.author.roles: if role.id == requested_guild.id: yield from self.bot.remove_roles( ctx.message.author, requested_guild) yield from self.bot.send_message( ctx.message.channel, "{0.mention}: You've been removed from {1}." .format(member, requested_guild.name) ) return # So we got this far, add the user to the role yield from self.bot.add_roles(ctx.message.author, requested_guild) yield from self.bot.send_message( ctx.message.channel, "{0.mention}: You've been successfully " "added to {1}!".format(member, requested_guild.name)) return return @commands.command(pass_context=True, no_pm=False, name='role') @commands.cooldown(rate=1, per=1, type=commands.BucketType.user) async def update_role_list(self, ctx, add_or_remove: str, role_id: str, member: discord.Member = None ): """ Update the configured role list to add or remove a group. Command is executed via the `role` command. Examples: > role add Test Configuration file updated. > role add Test Role already added. > role test Test Please specify if I am adding or removing a role. > role remove Test Configuration file updated. 
:param ctx: discord.py Context :param add_or_remove: (str) [add, remove] passed in string to determine if a role is being added or removed :param role_id: discord snowflake ID for the role, can be added via direct pinging of the role :param member: optional discord.Member object :return: """ member = ctx.message.author server_id = str(ctx.message.server.id) if member is not None: if add_or_remove != 'add' and add_or_remove != 'remove': return await self.bot.say("Please specify if I am adding or removing a role.") current_role_list = ConfigLoader().load_server_string_setting( server_id, 'RoleAssignment', 'role_list' ) # Somewhat ugly fix for when mentioning the role to strip stuff out role_id = role_id.replace('<@&', '') role_id = role_id.replace('>', '') role_id = role_id.strip() updated_role_list = '' if add_or_remove == 'add': if not BotResources().contains_word(current_role_list, role_id): if current_role_list == 'NOT_SET': updated_role_list = role_id else: updated_role_list = current_role_list + " " + role_id else: return await self.bot.say("Role already added.") if add_or_remove == 'remove': if BotResources().contains_word(current_role_list, role_id): updated_role_list = current_role_list.replace(role_id, '') if updated_role_list.isspace() or len(updated_role_list) == 0: updated_role_list = 'NOT_SET' filename = ctx.message.server.id await ConfigCommands(self.bot).update_config_file( filename, 'RoleAssignment', 'role_list', updated_role_list.strip(), ctx.message ) @commands.command(pass_context=True, no_pm=False, name='rolechannel') @commands.cooldown(rate=1, per=1, type=commands.BucketType.user) async def update_channel_list(self, ctx, add_or_remove: str, channel_id: str, member: discord.Member = None ): """ Update the configured channel list to add or remove a channel where the guild command can be used. Command is executed via the `rolechannel` command. Examples: > rolechannel add 1234567890 Configuration file updated. > rolechannel add 1234567890 Role already added. > rolechannel test 1234567890 Please specify if I am adding or removing a channel. > rolechannel remove 1234567890 Configuration file updated. 
:param ctx: discord.py Context :param add_or_remove: (str) [add, remove] passed in string to determine if a channel is being added or removed :param channel_id: discord snowflake ID for the channel, requires the direct ID and cannot be added via pinging :param member: optional discord.Member object :return: """ member = ctx.message.author server_id = str(ctx.message.server.id) if member is not None: if add_or_remove != 'add' and add_or_remove != 'remove': return await self.bot.say("Please specify if I am adding or removing a channel.") current_channel_list = ConfigLoader().load_server_config_setting( server_id, 'RoleAssignment', 'assignment_channel_id' ) if add_or_remove == 'add': if not BotResources().contains_word(current_channel_list, channel_id): if current_channel_list == 'NOT_SET': updated_channel_list = channel_id else: updated_channel_list = current_channel_list + " " + channel_id else: return await self.bot.say("Channel already added.") if add_or_remove == 'remove': if BotResources().contains_word(current_channel_list, channel_id): updated_channel_list = current_channel_list.strip(' ' + channel_id + ' ') if updated_channel_list.isspace() or len(updated_channel_list) == 0: updated_channel_list = 'NOT_SET' filename = ctx.message.server.id await ConfigCommands(self.bot).update_config_file( filename, 'RoleAssignment', 'assignment_channel_id', updated_channel_list.strip(), ctx.message )
python
23
0.542113
119
39.987448
239
Handles commands for adding or removing a role, adding or removing a channel, or adding a user to a configured role @TODO: allow owners to add roles/channels simply by passing in the name of the role or channel instead of requiring them to use the snowflake ID; if there happens to be more than one of either, then require the owner to use the specific ID
class
public boolean updateNodeColor( long key, int nodeColor ) { int index = index( key ); if (index < 0) { return false; } _colors[index] = nodeColor; return true; }
java
7
0.516129
59
26.25
8
/** * Updates the color of the node for the given key. Returns false if there is no entry for the given key; otherwise stores the new color and returns true. * @param key * @param nodeColor * @return true if the color was updated, false if no entry exists for the key */
function
public LeftRightFound GetLeftRight(float pos) { var needle = new ColorStop {Position = pos}; ColorStop l = null; ColorStop r = null; var idx = Stops.BinarySearch(needle); var notfound = false; if (idx < 0) { idx = ~idx; } if (idx == Stops.Count) { notfound = true; r = new ColorStop(Stops[idx - 1]); if ((idx - 2) >= 0) { l = new ColorStop(Stops[idx - 2]); } else { l = new ColorStop(r); r.Position = 1.0f; } } else { r = new ColorStop(Stops[idx]); if ((idx - 1) >= 0) { l = new ColorStop(Stops[idx - 1]); } else { l = new ColorStop(r) {Position = 0.0f}; } } return new LeftRightFound(l, r, notfound); }
c#
17
0.513477
47
18.051282
39
/// <summary> /// Get a ColorStop on the left of pos, and on the right of pos. /// Third tuple member is true when no stop was found that actually /// matches pos /// </summary> /// <param name="pos"></param> /// <returns></returns>
function
static int gattc_disc_char_ind_handler(ke_msg_id_t const msgid, struct gattc_disc_char_ind const *param, ke_task_id_t const dest_id, ke_task_id_t const src_id) { struct blpc_env_tag *blpc_env = PRF_CLIENT_GET_ENV(dest_id, blpc); prf_search_chars(blpc_env->bps.svc.ehdl, BLPC_CHAR_MAX, &blpc_env->bps.chars[0], &blpc_bps_char[0], param, &blpc_env->last_char_code); return (KE_MSG_CONSUMED); }
c
10
0.509946
82
49.363636
11
/** **************************************************************************************** * @brief Handles reception of the @ref GATTC_DISC_CHAR_IND message. * Characteristics for the currently desired service handle range are obtained and kept. * @param[in] msgid Id of the message received (probably unused). * @param[in] param Pointer to the parameters of the message. * @param[in] dest_id ID of the receiving task instance (probably unused). * @param[in] src_id ID of the sending task instance. * @return If the message was consumed or not. **************************************************************************************** */
function
public static String load(String type) throws Exception { String content = ""; if ("GET".equalsIgnoreCase(type)) { URI uri = new UrlBuilder(AnwUrlUtil.getAccessTokenUrlWithUrlParams()).getURI(); content = HttpsCall.get(uri).getContent(); } else if ("POST".equalsIgnoreCase(type)) { content = AnwUrlUtil.getAccessTokenViaPost(); } LOG.info("Access Token - retrieved response: " + content); AccessTokenDto accessToken = JsonUtil.getObject(content, AccessTokenDto.class); Property.saveAccessToken(accessToken.getAccess_token()); LOG.info("Access Token - loaded access token = " + accessToken.getAccess_token()); return accessToken.getAccess_token(); }
java
12
0.672507
90
48.533333
15
/** * Retrieves the access token from SAP Anywhere, saves it and returns it. * * @param type Type of SAP Anywhere call - GET or POST * @return value of the access token * @throws Exception possible exception during retrieving, processing and saving the access token */
function
def train(df,df2,df4): dict2, dict3 = markov_model_motif(df) dict2_non, dict3_non = markov_model_motif(df2) for i in range(len(df4)): list1 = [] list2 = [] for i in range(len(df4)): seq = df4["seq"][i] score_motif,dict5,dict6 = motif_score(dict2, dict3, seq) score_nonmotif,dict7,dict8 = motif_score(dict2_non, dict3_non, seq) list1.append(score_motif-score_nonmotif) if score_motif > score_nonmotif: list2.append(1) else: list2.append(0) df4["predicted"] = list2 df4["log odd score"] = list1 print(df4) if score_motif > score_nonmotif: return dict6 else: return dict8
python
11
0.615616
73
29.318182
22
df: sequences for the motif Markov model df2: sequences for the non-motif Markov model df4: the test DataFrame - pass the entire thing (seq + bound) Returns: the final dictionaries
function
public void onConfiguration(HobbitOps.View view, boolean firstTimeIn) { mHobbitView = new WeakReference<>(view); if (firstTimeIn == false && mCursor != null) mHobbitView.get().displayCursor(mCursor); }
java
8
0.553957
54
38.857143
7
/** * Hook method dispatched by the GenericActivity framework to * initialize the HobbitOpsImpl object after it's been created. * * @param view The currently active HobbitOps.View. * @param firstTimeIn Set to "true" if this is the first time the * Ops class is initialized, else set to * "false" if called after a runtime * configuration change. */
function
func (d *DiscoveryService) regularPeerDiscovery() { if !d.baseServer.HasFreeConnectionSlot(network.DirOutbound) { return } peerID := d.baseServer.GetRandomPeer() if peerID == nil { return } if err := d.attemptToFindPeers(*peerID); err != nil { d.logger.Error( "Failed to find new peers", "peer", peerID, "err", err, ) } }
go
9
0.655271
62
18.555556
18
// regularPeerDiscovery grabs a random peer from the list of // connected peers, and attempts to find / connect to their peer set
function
@Override @AvailMethod A_Definition o_LookupByValuesFromList ( final AvailObject object, final List<? extends A_BasicObject> argumentList) throws MethodDefinitionException { final LookupTree<A_Definition, A_Tuple, Boolean> tree = object.slot(PRIVATE_TESTING_TREE).javaObjectNotNull(); final A_Tuple results = runtimeDispatcher.lookupByValues(tree, argumentList, TRUE); return MethodDefinitionException.extractUniqueMethod(results); }
java
8
0.794702
64
36.833333
12
/** * Look up the definition to invoke, given an array of argument values. * Use the testingTree to find the definition to invoke (answer nil if * a lookup error occurs). */
function
public static boolean verifyDetached(Allocated message, Allocated signature, PublicKey publicKey) { int rc = Sodium.crypto_sign_verify_detached( signature.pointer(), message.pointer(), message.length(), publicKey.value.pointer()); if (rc == -1) { return false; } if (rc != 0) { throw new SodiumException("crypto_sign_verify_detached: failed with result " + rc); } return true; }
java
10
0.625
99
31.071429
14
/** * Verify a detached signature for a given message. * * @param message The message that was signed. * @param signature The detached signature. * @param publicKey The public key of the signer. * @return whether the signature matches the message according to the public key. */
function
def azimavg(data: np.ndarray, dataerr: Optional[np.ndarray], mask: Optional[np.ndarray], wavelength: float, distance: float, pixelsize: float, center_row: float, center_row_err: float, center_col: float, center_col_err: float, N: int, errorprop: int = 3, phierrorprop: int = 3, interval: Optional[Tuple[float, float]] = None, limitsinq: bool = True): if interval is not None: if limitsinq: interval = [np.tan(2 * np.arcsin(q / 4 / np.pi)) * distance / pixelsize for q in interval] mask = c_radavg2.maskforannulus(mask, center_row, center_col, interval[0], interval[1]) phi, intensity, error, phierror, area, qmean, qstd = c_radavg2.azimavg( data=data, error=dataerr if dataerr is not None else np.ones_like(data), mask=mask, wavelength=wavelength, distance=distance, pixelsize=pixelsize, center_row=center_row, center_row_unc=center_row_err, center_col=center_col, center_col_unc=center_col_err, N=N, errorprop=errorprop, phierrorprop=phierrorprop ) return phi, intensity, error, phierror, area, qmean, qstd
python
19
0.613323
102
46.384615
26
Perform azimuthal averaging on a scattering pattern. Inputs: data (np.ndarray, two dimensions, double dtype): scattering pattern dataerr (np.ndarray, two dimensions, double dtype): uncertainties of the scattering pattern mask (np.ndarray, two dimensions, uint8 dtype): mask matrix wavelength (double): X-ray wavelength, in nm distance (double): sample-to-detector distance, in mm pixelsize (double): the length of the edge of a square pixel, in mm center_row (double): beam center position, row coordinate, in pixel units center_row_err (double): uncertainty of the beam center row coordinate, in pixel units center_col (double): beam center position, column coordinate, in pixel units center_col_err (double): uncertainty of the beam center column coordinate, in pixel units N (int): number of bins errorprop (int, 0-3 inclusive): error propagation type for intensities (see below) phierrorprop (int, 0-3 inclusive): error propagation type for phi (see below) interval (2-tuple of floats): lower and upper bounds of the annulus for limiting the averaging limitsinq (bool): the two numbers in `interval` are q values (True) or pixel values (False) Returns: phi, Intensity, Error, phiError, Area, qmean, qstd (all one-dimensional np.ndarrays of length N) phi (dtype: double): azimuth angle in radians, 0 to 2*pi Intensity (dtype: double): intensity Error (dtype: double): propagated uncertainty of the intensity phiError (dtype: double): propagated uncertainty of phi (radians) Area (dtype: uint32): number of pixels falling into the bins qmean (dtype: double): average of q values in the bin (simple mean) qstd (dtype: double): sample standard deviation of the q values in the bin Requirements: - `data`, `dataerr` and `mask` must be of the same shape - values of 0 in `mask` indicate invalid pixels. All other values correspond to valid ones. - beam center coordinates are expressed in pixels, starting from 0. Error propagation types (parameters `errorprop` and `phierrorprop`): 0: values falling into the same bin are considered as independent samples from the same quantity. The bin mean is a weighted mean of the values using 1/sigma^2 as weight. Error is 1/sqrt(sum(sigma^2)). 1: linear: error is simply the mean of errors 2: squared: error is the square root of the mean of squared errors 3: conservative: either `squared` or the RMS of all values, whichever is larger. Units: - beam centers are expected in pixel units - distance and pixel size must be expressed in the same units (mm, cm, etc.) - angles are in radians Binning: - bins go from 0 (included) to 2*pi (excluded). - The first bin is centered on 0 rad - the bin width is 2*pi/N.
function
function insertBundles() { let i = 0; return loopFiles((path, fileStats, next) => { Lib.readJSON(path).then(json => { i++; if (json.resourceType == "Bundle") { return insertBundle(json, fileStats.name, i); } else { console.log(`===> Skipping file "${fileStats.name}" (not a bundle)`.red); } }).then(next, console.error); }); }
javascript
25
0.479821
89
28.8
15
/** * Walks the app input directory, and inserts all the bundles into the database * @returns {Promise<*>} */
function
def _plot_control_commands(self, sim: Simulation, all_axes: AxesTuple): ail_cmd = sim[prp.aileron_cmd] ele_cmd = sim[prp.elevator_cmd] thr_cmd = sim[prp.throttle_cmd] rud_cmd = sim[prp.rudder_cmd] all_axes.axes_stick.plot([ail_cmd], [ele_cmd], 'bo', mfc='none', markersize=10, clip_on=False) all_axes.axes_throttle.plot([0], [thr_cmd], 'bo', mfc='none', markersize=10, clip_on=False) all_axes.axes_rudder.plot([rud_cmd], [0], 'bo', mfc='none', markersize=10, clip_on=False)
python
8
0.587189
99
61.555556
9
Plots agent-commanded actions on the environment figure. :param sim: Simulation to plot control commands from :param all_axes: AxesTuple, collection of axes of subplots to plot on
function
static (bool status, double price, double payment) GetInput() { Console.Write("Cost of item? "); var priceString = Console.ReadLine(); if (!double.TryParse(priceString, out double price)) { Console.WriteLine($"{priceString} isn't a number!"); return (false, 0, 0); } Console.Write("Amount of payment? "); var paymentString = Console.ReadLine(); if (!double.TryParse(paymentString, out double payment)) { Console.WriteLine($"{paymentString} isn't a number!"); return (false, 0, 0); } return (true, price, payment); }
c#
12
0.507565
70
39.444444
18
/// <summary> /// Gets user input for price and payment. /// </summary> /// <returns> /// False if any input can't be parsed to double. Price and payment returned would be 0. /// True if it was possible to parse inputs into doubles. Price and payment returned /// would be as provided by the user. /// </returns>
function
def compute(self, check_range=(None, None)): self.check_since, self.check_until = check_range df = self.df if self.check_since is not None: df = df[df['created_date'] < self.check_since] if self.check_until is not None: df = df[df['created_date'] >= self.check_until] count_new_committers = len(df.index) return count_new_committers
python
11
0.59901
59
44
9
Count number of new committers who committed between the two dates of check range. :param check_range: A tuple which represents the start and end date when new committers will be considered :returns count_new_committers: the number of new committers who committed between the dates of check_range
function
static void PickRandomCard() { valueIndex = pickedValue.Next(1, 14); suitIndex = pickedSuit.Next(1, 5); if (cardDeckRepeatCheck[suitIndex, valueIndex] == false) { while (cardDeckRepeatCheck[suitIndex, valueIndex] == false) { valueIndex = pickedValue.Next(1, 14); suitIndex = pickedSuit.Next(1, 5); } } forCardValues.Add(valueIndex); forCardSuits.Add(suitIndex); }
c#
13
0.536346
71
33
15
/// <summary> /// Method that picks a card at random, checks that it is not repeated, and adds it to a list for later use /// </summary>
function
public abstract class GenericPass { private static final Logger logger = LoggerFactory.getLogger(GenericPass.class); //private static final double EXACT_MATCH_CONFIDENCE = 1.0; private int triggerCount = 0; public void execute(List<List<ResolvedLocation>> possibilitiesToDo, List<ResolvedLocation> bestCandidates) { if(possibilitiesToDo.size()==0){ // bail if there is nothing to disambiguate return; } List<List<ResolvedLocation>> possibilitiesToRemove = disambiguate( possibilitiesToDo, bestCandidates); for(ResolvedLocation pickedCandidate: bestCandidates){ logSelectedCandidate(pickedCandidate); logResolvedLocationInfo(pickedCandidate); } triggerCount+= possibilitiesToRemove.size(); for (List<ResolvedLocation> toRemove : possibilitiesToRemove) { possibilitiesToDo.remove(toRemove); } logger.debug("Still have " + possibilitiesToDo.size() + " lists to do"); } abstract public String getDescription(); abstract protected List<List<ResolvedLocation>> disambiguate( List<List<ResolvedLocation>> possibilitiesToDo, List<ResolvedLocation> bestCandidates); /** * This version of CLAVIN doesn't appear to fill in the confidence correctly * - it says 1.0 for everything. So we need a workaround to see if something * is an exact match. * * @param candidate * @return */ static boolean isExactMatch(ResolvedLocation candidate) { //logger.debug(candidate.getGeoname().name + " EQUALS " + candidate.location.text + " ? " + candidate.getGeoname().name.equals(candidate.location.text)); return candidate.getGeoname().getName().equalsIgnoreCase(candidate.getLocation().getText()); // return candidate.confidence==EXACT_MATCH_CONFIDENCE; } protected static List<ResolvedLocation> getExactMatches(List<ResolvedLocation> candidates){ ArrayList<ResolvedLocation> exactMatches = new ArrayList<ResolvedLocation>(); for( ResolvedLocation item: candidates){ if(GenericPass.isExactMatch(item)){ exactMatches.add(item); } } return exactMatches; } protected static boolean inSameSuperPlace(ResolvedLocation candidate, List<ResolvedLocation> list){ for( ResolvedLocation item: list){ if(candidate.getGeoname().getAdmin1Code().equals(item.getGeoname().getAdmin1Code())){ return true; } } return false; } protected static boolean isCity(ResolvedLocation candidate){ return candidate.getGeoname().getPopulation()>0 && candidate.getGeoname().getFeatureClass()==FeatureClass.P; } protected static boolean isAdminRegion(ResolvedLocation candidate){ return candidate.getGeoname().getPopulation()>0 && candidate.getGeoname().getFeatureClass()==FeatureClass.A; } protected ResolvedLocation findFirstCityCandidate(List<ResolvedLocation> candidates, boolean exactMatchRequired){ for(ResolvedLocation candidate: candidates) { if(isCity(candidate)){ if (exactMatchRequired && isExactMatch(candidate)){ return candidate; } else if (!exactMatchRequired){ return candidate; } } } return null; } protected ResolvedLocation findFirstAdminCandidate(List<ResolvedLocation> candidates, boolean exactMatchRequired){ for(ResolvedLocation candidate: candidates) { if(isAdminRegion(candidate)){ if (exactMatchRequired && isExactMatch(candidate)){ return candidate; } else if (!exactMatchRequired){ return candidate; } } } return null; } /* Logic is now to compare the City place with the Admin/State place. * If City has larger population then choose it. If the City and State are in the same country, * then choose the city (this will favor Paris the city over Paris the district in France). * If the City has lower population and is not in same country then choose the state. 
*/ protected boolean chooseCityOverAdmin(ResolvedLocation cityCandidate, ResolvedLocation adminCandidate){ if (cityCandidate == null){ return false; } else if (adminCandidate == null){ return true; } else { return (cityCandidate.getGeoname().getPopulation() > adminCandidate.getGeoname().getPopulation()) || (cityCandidate.getGeoname().getPrimaryCountryCode() == adminCandidate.getGeoname().getPrimaryCountryCode()); } } protected boolean inSameCountry(ResolvedLocation candidate, List<ResolvedLocation> list){ for( ResolvedLocation item: list){ if(candidate.getGeoname().getPrimaryCountryCode().equals(item.getGeoname().getPrimaryCountryCode())){ return true; } } return false; } public static void logSelectedCandidate(ResolvedLocation candidate){ logger.debug(" PICKED: "+candidate.getLocation().getText()+"@"+candidate.getLocation().getPosition()); } public static void logResolvedLocationInfo(ResolvedLocation resolvedLocation){ GeoName candidatePlace = resolvedLocation.getGeoname(); logger.debug(" "+candidatePlace.getGeonameID()+" "+candidatePlace.getName()+ ", "+ candidatePlace.getAdmin1Code()+ ", " + candidatePlace.getPrimaryCountryCode() + " / "+resolvedLocation.getConfidence() +" / "+candidatePlace.getPopulation() + " / " + candidatePlace.getFeatureClass() + " ( isExactMatch="+isExactMatch(resolvedLocation)+" )"); } /** * How many times has this pass triggered a disambiguation * @return */ public int getTriggerCount(){ return triggerCount; } }
java
26
0.653662
158
40.724138
145
/** * Wrapper around the concept that we can disambiguate ResolvedLocations in passes, building * on the confidence in disambiguation results from preceding passes. * @author rahulb */
class
def __process_ft2_slow(self,gti_starts,gti_stops): t1,t2,lt = self.START,self.STOP,self.LIVETIME overlaps = N.zeros_like(lt) for i,(gti_t1,gti_t2) in enumerate(zip(gti_starts,gti_stops)): maxi = N.maximum(gti_t1,t1) mini = N.minimum(gti_t2,t2) overlaps += N.maximum(0,mini - maxi) return overlaps/(t2 - t1)
python
10
0.585561
70
45.875
8
Calculate the fraction of each FT2 interval lying within the GTI. Uses a slow, easily-checked algorithm. The complexity is O(t^2) and is prohibitive for mission-length files.
function
fn insert_new_order(match_eng: &mut matching_engine::MatchingEngine) { // Ask user to enter new order println!("Insert new order: "); // Process user input for side of the order println!("Side: (1 = buy, 2 = sell)"); let mut m_side = String::new(); io::stdin().read_line(&mut m_side); let m_side: char = m_side.chars().nth(0).unwrap(); // Process user input for price of the order println!("Price: "); let mut m_price = String::new(); io::stdin().read_line(&mut m_price); let m_price = m_price.trim().parse::<i64>().unwrap(); // Process user input for quantity of the order println!("Quantity: "); let mut m_qty = String::new(); io::stdin().read_line(&mut m_qty); let m_qty = m_qty.trim().parse::<i64>().unwrap(); // Call INSERT function in Matching Engine match_eng.insert(&objects::Order::new(m_qty, m_price, m_side)); }
rust
11
0.653207
70
39.142857
21
/** This function asks the user to enter the necessary information for an insertion request: - Side of the order ('1' = BUY, '2' = SELL) - Price of the order - Quantity of the order * NOTE: User inputs are assumed to be in the correct format/types. */
function
public final class PomodoroTracker { public static final String URL = "https://jxbrowser-support.teamdev.com/docs/tutorials/jpackage/pomodoro.html"; public static void main(String[] args) { var splash = showSplashScreen(); showBrowser(); splash.dispose(); } private static void showBrowser() { var engine = Engine.newInstance(HARDWARE_ACCELERATED); var browser = engine.newBrowser(); var frame = new JFrame("Pomodoro Tracker"); frame.addWindowListener(new WindowAdapter() { @Override public void windowClosing(WindowEvent e) { engine.close(); } }); var view = BrowserView.newInstance(browser); frame.add(view, BorderLayout.CENTER); frame.setDefaultCloseOperation(DISPOSE_ON_CLOSE); frame.setSize(1280, 900); frame.setLocationRelativeTo(null); frame.setVisible(true); browser.navigation().loadUrl(URL); } private static JWindow showSplashScreen() { var splash = new JWindow(); splash.getContentPane().add(new JLabel("Loading...", CENTER)); splash.setBounds(500, 150, 300, 200); splash.setVisible(true); return splash; } }
java
15
0.621835
115
33.189189
37
/** * A Pomodoro tracker. * * This app displays a window with the integration browser component that loads and displays * the Pomodoro Tracker web application. */
class
pub fn next_chunks(&self, chunk_size: usize) -> Result<Vec<Self>> { let mut v = Vec::with_capacity(chunk_size); for i in 0..chunk_size { match self.incr(i as u32) { Err(Error::IndexOutOfBound(_)) => break, Err(err) => return Err(err), Ok(r) => v.push(r), } } Ok(v) }
rust
14
0.454545
67
33.090909
11
/// Generates a sequence of Addressing values from the given /// addressing as the starting point, up to `chunk_size` entries. /// /// The function returns as soon as `chunk_size` is reached /// or at the first `Error::IndexOutOfBound`. ///
function
def copy(self, remotepath = '/', localpath = None): remote_url = self._remote_url + remotepath if localpath is None: (file, localpath) = mkstemp() os.close(file) cmdline = ["rsync", "--copy-links", remote_url, localpath] log.debug("executing %s", " ".join(cmdline)) subprocess.check_call(cmdline, env = self._environment) return localpath
python
9
0.587805
66
44.666667
9
Copy a file from the remote rsync module to the local filesystem. If no local destination is specified in __localpath__, a temporary file is created and its filename returned. The temporary file has to be deleted by the caller.
function
public final class ExtractURIComponentBuilder implements CommandBuilder { @Override public Collection<String> getNames() { return Collections.singletonList("extractURIComponent"); } @Override public Command build(Config config, Command parent, Command child, MorphlineContext context) { return new ExtractURIComponent(this, config, parent, child, context); } /////////////////////////////////////////////////////////////////////////////// // Nested classes: /////////////////////////////////////////////////////////////////////////////// private static final class ExtractURIComponent extends AbstractCommand { private final String inputFieldName; private final String outputFieldName; private final boolean failOnInvalidURI; private final Component component; public ExtractURIComponent(CommandBuilder builder, Config config, Command parent, Command child, MorphlineContext context) { super(builder, config, parent, child, context); this.inputFieldName = getConfigs().getString(config, "inputField"); this.outputFieldName = getConfigs().getString(config, "outputField"); this.failOnInvalidURI = getConfigs().getBoolean(config, "failOnInvalidURI", false); this.component = new Validator<Component>().validateEnum( config, getConfigs().getString(config, "component"), Component.class); validateArguments(); } @Override protected boolean doProcess(Record record) { for (Object uriObj : record.get(inputFieldName)) { URI uri; try { uri = new URI(uriObj.toString()); } catch (URISyntaxException e) { LOG.debug("Invalid URI: {}", uriObj); if (failOnInvalidURI) { return false; } continue; } switch (component) { case scheme: { addValue(record, uri.getScheme()); break; } case authority: { addValue(record, uri.getAuthority()); break; } case path: { addValue(record, uri.getPath()); break; } case query: { addValue(record, uri.getQuery()); break; } case fragment: { addValue(record, uri.getFragment()); break; } case host: { addValue(record, uri.getHost()); break; } case port: { addValue(record, uri.getPort()); break; } case schemeSpecificPart: { addValue(record, uri.getSchemeSpecificPart()); break; } case userInfo: { addValue(record, uri.getUserInfo()); break; } default: { throw new IllegalStateException("Illegal component: " + component.name()); // unreachable } } } // pass record to next command in chain: return super.doProcess(record); } private void addValue(Record record, Object value) { if (value != null) { record.put(outputFieldName, value); } } } /////////////////////////////////////////////////////////////////////////////// // Nested classes: /////////////////////////////////////////////////////////////////////////////// private static enum Component { scheme, authority, path, query, fragment, host, port, schemeSpecificPart, userInfo, } }
java
19
0.519231
128
28.908333
120
/** * A command that extracts a subcomponent from the URIs contained in the given input field and * appends it to the given output fields. */
class
def _check_initiator(self, ininame): cli_cmd = ('showiscsiini -ini %(name)s' % {'name': ininame}) out = self._execute_cli(cli_cmd) if out.find('Initiator Information') > -1: return True else: return False
python
10
0.512545
50
34
8
Check whether the initiator is already added.
function
private void hasIncreased(long master_id, long slave_id) { if (toastPref == false) { String master = getCountFromId(master_id).count.name; String slave = getCountFromId(slave_id).count.name; Toast.makeText(CountingActivity.this, String.format(getString(R.string.postIncr),master,slave), Toast.LENGTH_SHORT).show(); } }
java
15
0.698864
129
38.222222
9
/* * Pop up various exciting messages if the user has not bothered to turn them off in the * settings... */
function
public class SortableCollection<T> : ISortableCollection<T> where T : class, ISortableObject { private readonly List<T> _items = new List<T>(); private readonly List<T> _changeCache = new List<T>(); public T this[int index] { get { return _items[index]; } } public void Add(T item) { _items.Add(item); item.SortOrder = _items.Count > 0 ? _items.Max(i => i.SortOrder) + 1 : 1; } public bool Remove(T item) { var removeResult = _items.Remove(item); ReorderItems(); return removeResult; } public ICollection<T> FlushModifications() { var modifications = _changeCache.ToList(); _changeCache.Clear(); return modifications; } public void MoveUp(T item) { var itemIndex = GetItemIndex(item); if (itemIndex == 0) return; Move(item, 1); } public void MoveDown(T item) { var itemIndex = GetItemIndex(item); if (itemIndex == _items.Count - 1) return; Move(item, -1); } private void Move(T item, int offset) { var targetIndex = _items.IndexOf(item) - offset; var targetItem = _items[targetIndex]; _items.Remove(item); _items.Insert(targetIndex, item); item.SortOrder = targetItem.SortOrder; targetItem.SortOrder = item.SortOrder + offset; AddItemsToModificationCache(new[] { item, targetItem }); } public void InsertBefore(T item, T itemToInsert) { var itemIndex = GetItemIndex(item); _items.Insert(itemIndex, itemToInsert); ReorderItems(); } public void InsertAfter(T item, T itemToInsert) { var itemIndex = GetItemIndex(item); _items.Insert(itemIndex + 1, itemToInsert); ReorderItems(); } public int Count { get { return _items.Count; } } bool ICollection<T>.IsReadOnly { get { return false; } } public IEnumerator<T> GetEnumerator() { return _items.GetEnumerator(); } IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); } public void Clear() { _items.Clear(); } public bool Contains(T item) { return _items.Contains(item); } public void CopyTo(T[] array, int arrayIndex) { _items.CopyTo(array, arrayIndex); } public int IndexOf(T item) { return _items.IndexOf(item); } <summary> Returns the index of an item, if and only if the item is existent in the underlying list </summary> private int GetItemIndex(T item) { var itemIndex = _items.IndexOf(item); if (itemIndex < 0) throw new ArgumentException("Item was not found in underlying collection.", "item"); return itemIndex; } <summary> Will check the cache for the given items and will add them if they are not in the list </summary> private void AddItemsToModificationCache(IEnumerable<T> items) { foreach (var item in items.Where(item => !_changeCache.Contains(item))) { _changeCache.Add(item); } } <summary> Reorders the underlying list and returns all changed items </summary> private void ReorderItems() { var changedItems = new List<T>(); var index = 0; foreach (var item in _items) { var sortIndex = index + 1; if (item.SortOrder != sortIndex) { item.SortOrder = sortIndex; changedItems.Add(item); } index++; } _items.Sort((a, b) => a.SortOrder.CompareTo(b.SortOrder)); AddItemsToModificationCache(changedItems); } }
c#
15
0.505548
100
31.291045
134
/// <summary> /// Special collection to handle positions of elements within this collection /// </summary> /// <typeparam name="T">The type of the elements within this collection</typeparam>
class
private Task OnMessageReceivedAsync(INetworkPeer peer, IncomingMessage message) { switch (message.Message.Payload) { case VersionPayload version: if (this.IsProvenHeaderActivated() && !this.CanServeProvenHeader(version)) { this.logger.LogDebug("Peer '{0}' has been banned because can't serve proven headers. Peer Version: {1}", peer.RemoteSocketEndpoint, version.Version); this.peerBanning.BanAndDisconnectPeer(peer.PeerEndPoint, "Can't serve proven headers."); } break; } return Task.CompletedTask; }
c#
14
0.562764
173
49.714286
14
/// <summary> /// Event handler that is called when the node receives a network message from the attached peer. /// </summary> /// <param name="peer">Peer that sent us the message.</param> /// <param name="message">Received message.</param> /// <remarks> /// This handler only cares about "version" messages, which are only sent once per node. /// </remarks> /// <returns>A <see cref="Task"/> representing the asynchronous operation.</returns>
function
public class PointDataImpl implements PointData { private ExtensionPoint extPoint; /** * Construct a new implementation of PointData with the given * extension point as the backing storage for data. */ public PointDataImpl(ExtensionPoint extensionPoint) { this.extPoint = extensionPoint; } /** * Sets the geo-location of the entity based on the lat and long coordinates * passed in. * * @param lat The latitude coordinate, between -90 and 90 degrees. * @param lon The longitude coordinate, between -180 and 180 degrees. * @throws IllegalArgumentException if the latitude and longitude coordinates * are invalid. */ public void setGeoLocation(Double lat, Double lon) throws IllegalArgumentException { setGeoLocation(new GeoRssWhere(lat, lon)); } /** * Sets the geo-location of the entity based on the Point extension. * * @param point A point containing the latitude and longitude coordinates. */ public void setGeoLocation(Point point) { setPoint(extPoint, point); } /** * Gets the geo-location of the entity. * @return a Point that contains the geo-coordinates (latitude and longitude). */ public Point getGeoLocation() { return getPoint(extPoint); } public void clearPoint() { clearPoint(extPoint); } /* * Declare the extensions that are used for storing Point information. */ public void declareExtensions(ExtensionProfile extProfile) { Class<? extends ExtensionPoint> extClass = extPoint.getClass(); // Declare all all Point implementations here so they are parsable // in the context of extClass. // Declare our various point extensions, none are repeatable. declare(extProfile, extClass, W3CPoint.getDefaultDescription(false)); declare(extProfile, extClass, GeoRssPoint.getDefaultDescription(false)); declare(extProfile, extClass, GeoRssWhere.getDefaultDescription(false)); new W3CPoint().declareExtensions(extProfile); new GeoRssWhere().declareExtensions(extProfile); } /** * Helper method to add the description to the BaseEntry as well as to the * extension class, so auto extension works properly. */ private void declare(ExtensionProfile extProfile, Class<? extends ExtensionPoint> extClass, ExtensionDescription desc) { extProfile.declare(extClass, desc); if (BaseEntry.class.isAssignableFrom(extClass)) { extProfile.declare(BaseEntry.class, desc); } } /** * Sets the geo point of the extension passed in. This will first try to * replace any existing point information. If there is no existing point, then * it will simply add the point extension. * * @param ext The extension point to add the Point to. * @param point The new point information. */ public static void setPoint(ExtensionPoint ext, Point point) { Point existing = getPointExtension(ext); if (existing != null) { Double lat = point != null ? point.getLatitude() : null; Double lon = point != null ? point.getLongitude() : null; existing.setGeoLocation(lat, lon); } else if (point != null) { ext.setExtension(point); } } /** * Iterates through all the extension points and finds the first matching * Point extension. * * NOTE(pingc): Made package private for testing. DO NOT USE. * * @param ext The extension point to search through. * @return A point extension point. This includes GeoRssWhere that may only * contain a box. */ static Point getPointExtension(ExtensionPoint ext) { for (Extension e : ext.getExtensions()) { if (e instanceof Point) { return (Point) e; } } return null; } /** * Helper method to retrieve the Box extension point. * @param ext The containing extension point. 
* @return An extension point that implements the Box interface and contain * box information. */ public static Point getPoint(ExtensionPoint ext) { Point p = getPointExtension(ext); if (p != null) { if (p instanceof GeoRssWhere) { GeoRssWhere geoWhere = (GeoRssWhere)p; if (geoWhere.hasPoint()) { return geoWhere; } } else { return p; } } return null; } /** * Removes the first Point extension found on the extension point. If the * point extension is a GeoRssWhere, it will only remove the GeoRssWhere * extension if it does not contain a Box/Envelope extension. * * @param ext The extension point from which to clear the Box extension. */ public static void clearPoint(ExtensionPoint ext) { Point p = getPointExtension(ext); if (p != null) { if (p instanceof GeoRssWhere) { GeoRssWhere where = (GeoRssWhere)p; if (where.hasBox()) { // If the GeoRssWhere has a box, just clear the point, otherwise // remove the whole thing. where.clearPoint(); return; } } ext.removeExtension(p); } } }
java
14
0.671884
80
30.55
160
/** * Implementation of the PointData interface. Currently only supports * a non-repeating Point extension. This class uses an {@link ExtensionPoint} * that is passed in to store the Point extension. * * */
class
public class RepeatedReadOptimizer { /** * The compiler generates multiple GetSourceValue for the same kind of read, in case it's a * repeated one (same source reference in multiple parts of the script). Trying to reduce them * all to one causes compile failures, so left them as is, and hid the ugly part in this class. */ Map<GetSourceValue, List<GetSourceValue>> sourceValues = new LinkedHashMap<>(); /** * Adds a source value reference * * @param node */ public void add(GetSourceValue node) { List<GetSourceValue> sourceValues = this.sourceValues.computeIfAbsent(node, f -> new ArrayList<>()); sourceValues.add(node); } /** * Declares a local variable for each read that is repeated at least once, creating local * variables, and making the {@link GetSourceValue} instances emit the new local variable * reference. Please call {@link #resetVariables()} to allow re-using the script one more time. */ public void declareRepeatedReads(SourceWriter w) { for (GetSourceValue sourceValue : sourceValues.keySet()) { List<GetSourceValue> valuesList = sourceValues.get(sourceValue); if (valuesList.size() > 1) { ImagePos pos = sourceValue.getPos(); Expression band = pos.getBand().getIndex(); Expression x = pos.getPixel().getX(); Expression y = pos.getPixel().getY(); SourceWriter varWriter = new SourceWriter(w.getRuntimeModel()); if (band instanceof ScalarLiteral && isPosition(x) && isPosition(y)) { // prefixes are used to separate variables "sv_" stands for source value varWriter.append("sv_").append(sourceValue.getVarName()).append("_"); x.write(varWriter); varWriter.append("_"); y.write(varWriter); varWriter.append("_"); band.write(varWriter); String variableName = varWriter.getSource().replace("-", "_"); w.indent(); w.append("double ").append(variableName).append(" = "); sourceValue.write(w); w.append(";"); w.newLine(); for (GetSourceValue reference : valuesList) { reference.setVariableName(variableName); } } } } } /** * Checks that the expression is just a proxy to a variable (e.g., <code>_x</code> or <code>_y * </code>) or an absolute reference to a fixed pixel. */ private boolean isPosition(Expression x) { return (x instanceof FunctionCall && ((FunctionCall) x).isProxy()) || x instanceof ScalarLiteral; } /** Resets the local variables references. */ public void resetVariables() { for (GetSourceValue sourceValue : sourceValues.keySet()) { List<GetSourceValue> valuesList = sourceValues.get(sourceValue); if (valuesList.size() > 1) { for (GetSourceValue reference : valuesList) { reference.setVariableName(null); } } } } }
java
17
0.565557
99
41.202532
79
/** * Support class that helps avoiding repeated reads on the source images, for the common case where * the source image reference is the current pixel (no offsets, no absolute references). */
class
async def upload(self, file: [str, BufferedReader], face_box_id: int = None, timeout: float = 180) -> [Profiles, BytesIO]: data = FormData() if file.startswith("http"): async with self._session.get(file, headers=self.headers) as response: file = await response.read() data.add_field("uploaded_photo", file, filename=f"{random_string()}.png", content_type="image/png") else: data.add_field("uploaded_photo", open(file, "rb"), filename=f"{random_string()}.png", content_type="image/png") async with self._session.post("https://findclone.ru/upload2", data=data, headers=self.headers, timeout=timeout) as response: resp = await response.json() if resp.get("faceBoxes"): if face_box_id is not None: async with self._session.get("https://findclone.ru/upload3", params={"id": face_box_id}, headers=self.headers) as response2: resp = await self.__builder(response2) return resp else: img_bytes = paint_boxes(file, resp) return img_bytes resp = await self.__builder(response) return resp
python
19
0.499651
115
54.153846
26
*coro Upload an image file or image URL and return a Profiles object or a BytesIO object :param file: direct image download link or local path :param face_box_id: OPTIONAL - send a face box id if 2 or more faces are detected :param timeout: OPTIONAL - max timeout delay :return: Profiles object, or BytesIO if 2 or more faces are detected
function
public List<String> parse(final String sentence) { if (sentence.isEmpty()) { return emptyList(); } if (getSeparator().isEmpty()) { return splitByTitleCharacters(sentence); } return splitBySeparator(sentence); }
java
8
0.700441
50
24.333333
9
/** * Splits the given sentence into words using the separator. If the separator * string is empty, the sentence is split by title characters. A leading * title character does not generate an empty leading word. * * @param sentence the sentence to be split * @return list of split words */
function
public class ServiceEndpoint implements FastExternalizable { /** * Identifies the type of socket factory. * * @see #writeFastExternal FastExternalizable format */ public enum SocketFactoryType implements FastExternalizable { CLEAR(0), SSL(1); private static final SocketFactoryType[] VALUES = values(); private SocketFactoryType(int ordinal) { if (ordinal != ordinal()) { throw new IllegalArgumentException("Wrong ordinal"); } } public static SocketFactoryType readFastExternal( DataInput in, @SuppressWarnings("unused") short serialVersion) throws IOException { final int ordinal = in.readByte(); try { return VALUES[ordinal]; } catch (ArrayIndexOutOfBoundsException e) { throw new IOException( "Wrong value for SocketFactoryType: " + ordinal, e); } } /** * Writes this object to the output stream. Format: * <ol> * <li> ({@code byte}) <i>value</i> // {@link #CLEAR}=0, {@link #SSL}=1 * </ol> */ @Override public void writeFastExternal(DataOutput out, short serialVersion) throws IOException { out.writeByte(ordinal()); } } private final NetworkAddress address; private final DialogType dialogType; private final ClientSocketFactory clientSocketFactory; /** * Creates a service endpoint. * * @param address the network address * @param dialogType the dialogType * @param clientSocketFactory the client socket factory */ public ServiceEndpoint(NetworkAddress address, DialogType dialogType, ClientSocketFactory clientSocketFactory) { this.address = checkNull("address", address); this.dialogType = checkNull("dialogType", dialogType); this.clientSocketFactory = checkNull("clientSocketFactory", clientSocketFactory); } /** * Creates a service endpoint from an input stream. * * @param in the input stream * @param serialVersion the version of the serialized form */ public ServiceEndpoint(DataInput in, short serialVersion) throws IOException { address = new NetworkAddress(in, serialVersion); dialogType = DialogType.readFastExternal(in, serialVersion); final SocketFactoryType socketFactoryType = SocketFactoryType.readFastExternal(in, serialVersion); switch (socketFactoryType) { case CLEAR: clientSocketFactory = new ClearClientSocketFactory(in, serialVersion); break; case SSL: clientSocketFactory = new SSLClientSocketFactory(in, serialVersion); break; default: throw new AssertionError(); } } /** * Writes this object to the output stream. Format: * <ol> * <li> ({@link NetworkAddress}) {@link #getNetworkAddress address} * <li> ({@link DialogType}) {@link #getDialogType dialogType} * <li> ({@link SocketFactoryType}) <i>socket factory type</i> * <li> ({@link ClientSocketFactory}) {@link #getClientSocketFactory * clientSocketFactory} * </ol> */ @Override public void writeFastExternal(DataOutput out, short serialVersion) throws IOException { address.writeFastExternal(out, serialVersion); dialogType.writeFastExternal(out, serialVersion); getSocketFactoryType().writeFastExternal(out, serialVersion); clientSocketFactory.writeFastExternal(out, serialVersion); } /** * Returns type of socket factory supported by the endpoint. * * @return the socket factory type */ private SocketFactoryType getSocketFactoryType() { return (clientSocketFactory instanceof SSLClientSocketFactory) ? SocketFactoryType.SSL : SocketFactoryType.CLEAR; } /** * Returns the network address of the endpoint. * * @return the network address */ public NetworkAddress getNetworkAddress() { return address; } /** * Returns the dialog type of the endpoint. 
* * @return the dialog type */ public DialogType getDialogType() { return dialogType; } /** * Returns the client socket factory to use for connecting to the endpoint. * * @return the client socket factory */ public ClientSocketFactory getClientSocketFactory() { return clientSocketFactory; } @Override public boolean equals(Object object) { if (object instanceof ServiceEndpoint) { final ServiceEndpoint endpoint = (ServiceEndpoint) object; return address.equals(endpoint.address) && dialogType.equals(endpoint.dialogType) && clientSocketFactory.equals(endpoint.clientSocketFactory); } return false; } @Override public int hashCode() { final int prime = 7; int value = 11; value = (value * prime) + address.hashCode(); value = (value * prime) + dialogType.hashCode(); value = (value * prime) + clientSocketFactory.hashCode(); return value; } @Override public String toString() { return "ServiceEndpoint[address=" + address + " dialogType=" + dialogType + " clientSocketFactory=" + clientSocketFactory + "]"; } }
java
16
0.606797
79
30.555556
180
/** * A service endpoint that includes the information needed to establish dialogs * with a remote server that implements an asynchronous interface. * * @see ServiceRegistryAPI * @see #writeFastExternal FastExternalizable format */
class
private double _distance_circle_to_line(Line3d l, out Point3d p1, out Point3d p2) { if (l.IsParallelTo(this)) { p2 = this.Center.ProjectionTo(l); p1 = this.ClosestPoint(p2); return p1.DistanceTo(p2); } object obj = l.IntersectionWith(this); if (obj != null) { p1 = (Point3d)obj; p2 = p1; return 0; } return _distance_circle_boundary_to_line(l, out p1, out p2); }
c#
12
0.447368
81
32.588235
17
/// <summary> /// Shortest distance between line and circle (including interior points) /// </summary> /// <param name="l">Target line</param> /// <param name="p1">Closest point on circle</param> /// <param name="p2">Closest point on line</param>
function
public class AdfDisableTaskMaster { private readonly TaskMetaDataDatabase _taskMetaDataDatabase; public AdfDisableTaskMaster(TaskMetaDataDatabase taskMetaDataDatabase) { _taskMetaDataDatabase = taskMetaDataDatabase; } [FunctionName("DisableTaskMaster")] public IActionResult Run( [HttpTrigger(AuthorizationLevel.Function, "post", Route = null)] HttpRequest req, ILogger log, ExecutionContext context) { Guid executionId = context.InvocationId; FrameworkRunner frp = new FrameworkRunner(log, executionId); FrameworkRunnerWorkerWithHttpRequest worker = DisableTaskMasterCore; FrameworkRunnerResult result = frp.Invoke(req, "DisableTaskMaster", worker); if (result.Succeeded) { return new OkObjectResult(JObject.Parse(result.ReturnObject)); } else { return new BadRequestObjectResult(new { Error = "Execution Failed...." }); } } public JObject DisableTaskMasterCore(HttpRequest req, Logging.Logging logging) { string requestBody = new StreamReader(req.Body).ReadToEndAsync().Result; dynamic data = JsonConvert.DeserializeObject(requestBody); dynamic taskMasterId = JObject.Parse(data.ToString())["TaskMasterId"]; _taskMetaDataDatabase.ExecuteSql(string.Format(@"Update [dbo].[TaskMaster] SET ActiveYN = '0' Where [TaskMasterId] = {0}", taskMasterId)); JObject root = new JObject { ["Result"] = "Complete" }; return root; } }
c#
16
0.611914
150
43.358974
39
/// <summary> /// The purpose of this function is to disable a TaskMaster object. /// </summary> // ReSharper disable once UnusedMember.Global
class
@Override public ResourceBundle resolveResourceBundle(final Class<?> controllerClass, final Locale locale) { final MessageSource messageSource = applicationContext.getBean(MessageSource.class); final AFXController afxController = AnnotationUtils.findAnnotation(controllerClass, AFXController.class); if (afxController == null || "".equals(afxController.resourcesBasename())) { return new MessageSourceResourceBundle(messageSource, locale); } final String baseName = afxController.resourcesBasename(); return ResourceBundle.getBundle(baseName, locale); }
java
10
0.807018
107
56.1
10
/** * Resolves to a {@link ResourceBundle} for the given {@code controllerClass}. * <p> * The logic is as follows: In case the controller class does not hold a * resource base name under {@link AFXController#resourcesBasename()}, it is * expected that Spring's central {@link MessageSource} holds all * internationalized texts. In that case we wrap the {@link MessageSource} in a * {@link MessageSourceResourceBundle}, so that it can be accessed by JavaFX. * <p> * In case a base name is provided under * {@link AFXController#resourcesBasename()}, then the regular * {@link ResourceBundle} is returned as it would be in a non-Spring setup. * */
function
def picatrix_helper(function: Callable[..., Any]) -> Callable[..., Any]: typing_hints = typing.get_type_hints(function) manager.MagicManager.register_helper( name=function.__name__, helper=function, typing_help=typing_hints) try: _ = utils.ipython_get_global(function.__name__) except KeyError: utils.ipython_bind_global(function.__name__, function) return function
python
10
0.70437
72
42.333333
9
Decorator to register a picatrix helper. Args: function (function): if the decorator is called without any arguments the helper function is passed to the decorator. Returns: The function that was passed in.
function
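The decorator above registers a helper together with its type hints and binds it into the notebook namespace. A generic sketch of that registration pattern in plain Python (the registry dict is a hypothetical stand-in for picatrix's MagicManager, not its real API):

import typing

HELPER_REGISTRY = {}  # hypothetical stand-in for a helper manager

def register_helper(function):
    # Record the callable and its type hints under the function's name,
    # then hand the function back unchanged so it stays usable directly.
    HELPER_REGISTRY[function.__name__] = {
        "helper": function,
        "typing_help": typing.get_type_hints(function),
    }
    return function

@register_helper
def add_numbers(a: int, b: int) -> int:
    return a + b

print(HELPER_REGISTRY["add_numbers"]["typing_help"])
# {'a': <class 'int'>, 'b': <class 'int'>, 'return': <class 'int'>}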
public Quaternion slerp(Quaternion qb, double t) { if (t == 0) { return this; } else if (t == 1) { return this.copyFrom(qb); } double x = this.x, y = this.y, z = this.z, w = this.w; double cosHalfTheta = w * qb.w + x * qb.x + y * qb.y + z * qb.z; if (cosHalfTheta < 0) { this.w = -qb.w; this.x = -qb.x; this.y = -qb.y; this.z = -qb.z; cosHalfTheta = -cosHalfTheta; } else { this.copyFrom(qb); } if (cosHalfTheta >= 1.0) { this.w = w; this.x = x; this.y = y; this.z = z; return this; } double halfTheta = Math.acos(cosHalfTheta); double sinHalfTheta = Math.sqrt(1.0 - cosHalfTheta * cosHalfTheta); if (Math.abs(sinHalfTheta) < 0.001) { this.w = 0.5 * (w + this.w); this.x = 0.5 * (x + this.x); this.y = 0.5 * (y + this.y); this.z = 0.5 * (z + this.z); return this; } double ratioA = Math.sin((1 - t) * halfTheta) / sinHalfTheta, ratioB = Math.sin(t * halfTheta) / sinHalfTheta; this.w = (w * ratioA + this.w * ratioB); this.x = (x * ratioA + this.x * ratioB); this.y = (y * ratioA + this.y * ratioB); this.z = (z * ratioA + this.z * ratioB); return this; }
java
11
0.553339
69
27.146341
41
/** * Handles the spherical linear interpolation between this quaternion's configuration and that of qb. t represents * how close to the current (0) or target (1) rotation the result should be. * * @param qb Target quaternion rotation. * @param t Normalized [0..1] interpolation factor. * @return this */
function
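The Java method above follows the standard slerp scheme: negate one quaternion to take the shorter arc, fall back to a simple blend when the quaternions are nearly opposite, and otherwise weight by sine ratios. An independent Python sketch of the same math over (w, x, y, z) tuples (illustrative only, not the original class):

import math

def slerp(qa, qb, t):
    # Dot product; a negative value means qb is on the far hemisphere.
    cos_half_theta = sum(a * b for a, b in zip(qa, qb))
    if cos_half_theta < 0:
        qb = tuple(-c for c in qb)          # take the shorter arc
        cos_half_theta = -cos_half_theta
    if cos_half_theta >= 1.0:
        return qa                           # same orientation, nothing to do
    half_theta = math.acos(cos_half_theta)
    sin_half_theta = math.sqrt(1.0 - cos_half_theta * cos_half_theta)
    if abs(sin_half_theta) < 1e-3:          # nearly opposite: plain midpoint blend
        return tuple(0.5 * (a + b) for a, b in zip(qa, qb))
    ratio_a = math.sin((1 - t) * half_theta) / sin_half_theta
    ratio_b = math.sin(t * half_theta) / sin_half_theta
    return tuple(a * ratio_a + b * ratio_b for a, b in zip(qa, qb))

identity = (1.0, 0.0, 0.0, 0.0)
quarter_turn = (math.cos(math.pi / 4), 0.0, 0.0, math.sin(math.pi / 4))
print(slerp(identity, quarter_turn, 0.5))   # roughly an eighth turn about z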
public final class ScriptRunner { private ScriptRunner() {} /** * Runs the specified test template to generate a set of test programs. * * @param options Options that set up the run configuration. * @param templateFile Test template to be run. * * @throws Throwable if any issues occurred during the script run. A special case is * {@link ru.ispras.microtesk.test.GenerationAbortedException} which means that some * of the engines invoked by the script decided to abort generation. */ public static void run(final Options options, final String templateFile) throws Throwable { final String extension = FileUtils.getFileExtension(templateFile).toLowerCase(); if (".rb".equals(extension)) { RubyRunner.run(options, templateFile); } else if (".py".equals(extension)) { PythonRunner.run(options, templateFile); } else { throw new GenerationAbortedException( String.format("Unsupported template file extension: %s.", extension)); } } }
java
14
0.697381
94
38.692308
26
/** * The {@link ScriptRunner} class runs test template scripts with corresponding scripting engines. * * @author <a href="mailto:[email protected]">Andrei Tatarnikov</a> */
class
def generate_report( results: Union[ReferenceResult, List[ReferenceResult]], show_only: Optional[str] = None, fill_empty: bool = True ) -> str: if isinstance(results, ReferenceResult): results = [results] if not isinstance(results, list) or not all(isinstance(r, ReferenceResult) for r in results): raise TypeError("Cannot generate a report from arguments that are not reference result objects") if show_only not in {"satisfied", "unsatisfied", None}: raise ValueError("show_only must be in {'satisfied', 'unsatisfied', None}") filtered = [] for res in results: if res.correct and show_only != "unsatisfied": filtered.append(res) elif not res.correct and show_only != "satisfied": filtered.append(res) if len(filtered) == 0 and fill_empty: filtered = results report = "" for i, res in enumerate(filtered): report += f"REFERENCE: {res.name}\n" report += f"SATISFIED: {res.correct}" if res.messages: report += f"\nMESSAGES:\n" report += indent("\n".join(res.messages), " - ") if i != len(filtered) - 1: report += "\n\n" return report
python
15
0.612664
104
42.464286
28
Collects a series of reference result objects and returns a summary of these results with any messages from the annotations. ``show_only`` should be in the set ``{'satisfied', 'unsatisfied', None}``. If ``"satisfied"``, the summary will contain only the results of satisfied reference implementations. If ``"unsatisfied"``, the summary will contain only the results of unsatisfied reference implementations. If ``None``, all reference implementations will be included. If ``show_only`` is set to a value that would result in an empty report (e.g. it is set to ``"satisfied"`` but no reference was satisfied) and ``fill_empty`` is ``True``, the report will be filled with any references that would normally be excluded by ``show_only``. Args: results (``Union[ReferenceResult, list[ReferenceResult]]``): the result(s) being collected show_only (``{'satisfied', 'unsatisfied', None}``): which results to report fill_empty (``bool``): if the resulting report would be empty, include results of the type not specified by ``show_only`` Returns: ``str``: the summary report
function
final class PriorityHeapTimerService implements TimerService { private static final Timer[] EMPTY_TIMERS = new Timer[0]; private static final int MIN_CAPACITY = 8; private final TimerHandler timerHandler; private final Long2ObjectHashMap<Timer> timerByCorrelationId = new Long2ObjectHashMap<>(); private Timer[] timers = EMPTY_TIMERS; private Timer[] freeTimers = EMPTY_TIMERS; private int size; private int freeTimerCount; /** * Construct a Priority Heap Timer Service using the supplied handler to * callback for expired timers. * * @param timerHandler to callback when a timer expires. */ PriorityHeapTimerService(final TimerHandler timerHandler) { this.timerHandler = Objects.requireNonNull(timerHandler, "TimerHandler"); } /** * Poll for expired timers, firing the callback supplied in the constructor. * * @param now current time. * @return the number of expired timers */ public int poll(final long now) { int expiredTimers = 0; final Timer[] timers = this.timers; final TimerHandler timerHandler = this.timerHandler; while (size > 0 && expiredTimers < POLL_LIMIT) { final Timer timer = timers[0]; if (timer.deadline > now) { break; } if (!timerHandler.onTimerEvent(timer.correlationId)) { break; } expiredTimers++; final int lastIndex = --size; final Timer lastTimer = timers[lastIndex]; timers[lastIndex] = null; if (0 != lastIndex) { shiftDown(timers, lastIndex, 0, lastTimer); } timerByCorrelationId.remove(timer.correlationId); addToFreeList(timer); } return expiredTimers; } /** * {@inheritDoc} */ public void scheduleTimerForCorrelationId(final long correlationId, final long deadline) { final Timer existingTimer = timerByCorrelationId.get(correlationId); if (null != existingTimer) { if (deadline < existingTimer.deadline) { existingTimer.deadline = deadline; shiftUp(timers, existingTimer.index, existingTimer); } else if (deadline > existingTimer.deadline) { existingTimer.deadline = deadline; shiftDown(timers, size, existingTimer.index, existingTimer); } } else { ensureCapacity(size + 1); final int index = size++; final Timer timer; if (freeTimerCount > 0) { final int freeIndex = --freeTimerCount; timer = freeTimers[freeIndex]; freeTimers[freeIndex] = null; timer.reset(correlationId, deadline, index); } else { timer = new Timer(correlationId, deadline, index); } timerByCorrelationId.put(correlationId, timer); shiftUp(timers, index, timer); } } /** * {@inheritDoc} */ public boolean cancelTimerByCorrelationId(final long correlationId) { final Timer removedTimer = timerByCorrelationId.remove(correlationId); if (null == removedTimer) { return false; } final int lastIndex = --size; final Timer lastTimer = timers[lastIndex]; timers[lastIndex] = null; if (lastIndex != removedTimer.index) { shiftDown(timers, lastIndex, removedTimer.index, lastTimer); if (timers[removedTimer.index] == lastTimer) { shiftUp(timers, removedTimer.index, lastTimer); } } addToFreeList(removedTimer); return true; } /** * {@inheritDoc} */ public void snapshot(final TimerSnapshotTaker snapshotTaker) { final Timer[] timers = this.timers; for (int i = 0, size = this.size; i < size; i++) { final Timer timer = timers[i]; snapshotTaker.snapshotTimer(timer.correlationId, timer.deadline); } } /** * {@inheritDoc} */ public void currentTime(final long now) { } void forEach(final Consumer<PriorityHeapTimerService.Timer> consumer) { final Timer[] timers = this.timers; for (int i = 0, size = this.size; i < size; i++) { consumer.accept(timers[i]); } } private static void shiftUp(final Timer[] timers, final int startIndex, final Timer timer) { int 
index = startIndex; while (index > 0) { final int prevIndex = (index - 1) >>> 1; final Timer prevTimer = timers[prevIndex]; if (timer.deadline >= prevTimer.deadline) { break; } timers[index] = prevTimer; prevTimer.index = index; index = prevIndex; } timers[index] = timer; timer.index = index; } private static void shiftDown(final Timer[] timers, final int size, final int startIndex, final Timer timer) { final int half = size >>> 1; int index = startIndex; while (index < half) { int nextIndex = (index << 1) + 1; final int right = nextIndex + 1; Timer nextTimer = timers[nextIndex]; if (right < size && nextTimer.deadline > timers[right].deadline) { nextIndex = right; nextTimer = timers[nextIndex]; } if (timer.deadline < nextTimer.deadline) { break; } timers[index] = nextTimer; nextTimer.index = index; index = nextIndex; } timers[index] = timer; timer.index = index; } private void ensureCapacity(final int requiredCapacity) { final int currentCapacity = timers.length; if (requiredCapacity > currentCapacity) { if (requiredCapacity > ArrayUtil.MAX_CAPACITY) { throw new IllegalStateException("max capacity reached: " + ArrayUtil.MAX_CAPACITY); } if (EMPTY_TIMERS == timers) { timers = new Timer[MIN_CAPACITY]; freeTimers = new Timer[MIN_CAPACITY]; } else { int newCapacity = currentCapacity + (currentCapacity >> 1); if (newCapacity < 0 || newCapacity > ArrayUtil.MAX_CAPACITY) { newCapacity = ArrayUtil.MAX_CAPACITY; } timers = Arrays.copyOf(timers, newCapacity); freeTimers = Arrays.copyOf(freeTimers, newCapacity); } } } private void addToFreeList(final Timer timer) { timer.reset(Aeron.NULL_VALUE, Aeron.NULL_VALUE, Aeron.NULL_VALUE); freeTimers[freeTimerCount++] = timer; } static final class Timer { long correlationId; long deadline; int index; Timer(final long correlationId, final long deadline, final int index) { reset(correlationId, deadline, index); } void reset(final long correlationId, final long deadline, final int index) { this.correlationId = correlationId; this.deadline = deadline; this.index = index; } public String toString() { return "PriorityHeapTimerService.Timer{" + "correlationId=" + correlationId + ", deadline=" + deadline + ", index=" + index + '}'; } } }
java
16
0.538164
112
27.491103
281
/** * Implementation of the {@link TimerService} that uses a priority heap to order the timestamps. * * <p> * <b>Caveats</b> * <p> * Timers with the same deadline are not ordered with respect to one another. In contrast, timers with different deadlines * are guaranteed to expire in order even after a Cluster restart, i.e. when the deadlines are in the past. * <p> * <b>Note:</b> Not thread safe. */
class
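The class above keeps timers in a binary min-heap keyed by deadline, plus a per-timer index so rescheduling and cancellation can repair the heap in place. The essence of the poll loop, sketched with Python's heapq (cancellation, rescheduling and the free-list reuse are deliberately omitted):

import heapq

class HeapTimerService:
    def __init__(self, handler):
        self._heap = []          # (deadline, correlation_id) pairs
        self._handler = handler  # called once per expired timer

    def schedule(self, correlation_id, deadline):
        heapq.heappush(self._heap, (deadline, correlation_id))

    def poll(self, now):
        expired = 0
        # Pop timers while the earliest deadline has passed.
        while self._heap and self._heap[0][0] <= now:
            _deadline, correlation_id = heapq.heappop(self._heap)
            self._handler(correlation_id)
            expired += 1
        return expired

svc = HeapTimerService(lambda cid: print("expired", cid))
svc.schedule(7, deadline=100)
svc.schedule(8, deadline=50)
print(svc.poll(now=75))   # fires timer 8 only: prints "expired 8" and then 1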
function _getCombinedPerimeters (cycles) { for (let ci = 0; ci < cycles.length; ci++) { const cycle = cycles[ci]; const perimCycle = cycle.perimeterCycle; perimCycle.marked = false; } const combinedPerimeters = []; for (let ci = 0; ci < cycles.length; ci++) { const cycle = cycles[ci]; const perimCycle = cycle.perimeterCycle; if (!perimCycle.marked) { perimCycle.marked = true; combinedPerimeters.push(perimCycle.boundaries); } } return combinedPerimeters; }
javascript
11
0.659491
53
29.117647
17
/** * Gets the perimeter boundaries of a group of cycles, which we already * calculated. */
function
bool ServerSocket::accept(Socket &sockClient, int timeout) { struct pollfd pfd; bool bRet = false; pfd.fd = mServer.getSocket(); pfd.events = POLLIN | POLLPRI; pfd.revents = 0; int r = ::poll(&pfd, 1, timeout); if (r == -1) { bRet = false; } else if (r == 0) { bRet = false; } else { bRet = mServer.accept(sockClient); } return bRet; }
c++
12
0.544554
60
24.3125
16
/** * @brief Listens for a connection to be made to this socket and accepts it. * * Listens for a connection to be made to this socket and accepts it. * The method blocks until the timeout elapses or a connection arrives. * A new Socket4 is created. * * @param sockClient Accepted socket just created. Can be Socket4 or Socket6, * depending on the server socket. * @param timeout Timeout, in milliseconds. * * @returns true if the function is successful; otherwise false. */
function
class Solution { public: ListNode* mergeKLists (std::vector<ListNode*> &lists) { auto it {lists.begin()}; while (it != lists.end() && !(*it)) { ++it; } ListNode *list {}; ListNode *last {}; std::vector<int> vector; if (it != lists.end()) { last = list = *it++; vector.push_back(last->val); while (last->next) { last = last->next; vector.push_back(last->val); } } while (it != lists.end()) { if (*it) { last = last->next = *it; vector.push_back(last->val); while (last->next) { last = last->next; vector.push_back(last->val); } } ++it; } if (list) { std::sort(vector.begin(), vector.end()); auto it {vector.begin()}; last = list; while (last) { last->val = *it++; last = last->next; } } return list; } }
c++
15
0.441909
55
17.921569
51
/** * Definition for singly-linked list. * struct ListNode { * int val; * ListNode *next; * ListNode() : val(0), next(nullptr) {} * ListNode(int x) : val(x), next(nullptr) {} * ListNode(int x, ListNode *next) : val(x), next(next) {} * }; */
class
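The C++ solution above concatenates the linked lists and then sorts the collected values. The other standard approach merges the k sorted sequences with a heap; a small Python sketch over ordinary lists rather than linked nodes:

import heapq

def merge_k_sorted(lists):
    # heapq.merge lazily merges already-sorted inputs in O(N log k).
    return list(heapq.merge(*lists))

print(merge_k_sorted([[1, 4, 5], [1, 3, 4], [2, 6]]))  # [1, 1, 2, 3, 4, 4, 5, 6]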
async function calculateInputs(client, seed, initialAddressState, nextAddressPath, outputs, zeroCount = 5) { const localClient = typeof client === "string" ? new SingleNodeClient(client) : client; let requiredBalance = 0; for (const output of outputs) { requiredBalance += output.amount; } let consumedBalance = 0; const inputsAndSignatureKeyPairs = []; let finished = false; let zeroBalance = 0; do { const path = nextAddressPath(initialAddressState); const addressSeed = seed.generateSeedFromPath(new Bip32Path(path)); const addressKeyPair = addressSeed.keyPair(); const ed25519Address = new Ed25519Address(addressKeyPair.publicKey); const address = Converter.bytesToHex(ed25519Address.toAddress()); const addressOutputIds = await localClient.addressEd25519Outputs(address); if (addressOutputIds.count === 0) { zeroBalance++; if (zeroBalance >= zeroCount) { finished = true; } } else { for (const addressOutputId of addressOutputIds.outputIds) { const addressOutput = await localClient.output(addressOutputId); if (!addressOutput.isSpent && consumedBalance < requiredBalance) { if (addressOutput.output.amount === 0) { zeroBalance++; if (zeroBalance >= zeroCount) { finished = true; } } else { consumedBalance += addressOutput.output.amount; const input = { type: UTXO_INPUT_TYPE, transactionId: addressOutput.transactionId, transactionOutputIndex: addressOutput.outputIndex }; inputsAndSignatureKeyPairs.push({ input, addressKeyPair }); if (consumedBalance >= requiredBalance) { if (consumedBalance - requiredBalance > 0) { outputs.push({ amount: consumedBalance - requiredBalance, address: addressOutput.output.address.address, addressType: addressOutput.output.address.type }); } finished = true; } } } } } } while (!finished); if (consumedBalance < requiredBalance) { throw new Error("There are not enough funds in the inputs for the required balance"); } return inputsAndSignatureKeyPairs; }
javascript
27
0.506863
108
44.546875
64
/** * Calculate the inputs from the seed and basePath. * @param client The client or node endpoint to calculate the inputs with. * @param seed The seed to use for address generation. * @param initialAddressState The initial address state for calculating the addresses. * @param nextAddressPath Calculate the next address for inputs. * @param outputs The outputs to send. * @param zeroCount Abort when the number of zero balances is exceeded. * @returns The id of the message created and the constructed message. */
function
def larger_definition_of( self, container_data_list: List[Any], second_object_data: Optional[Any] = None ) -> Dict[str, Any]: second_object_is_close_and_inside = ( second_object_data and second_object_data.is_close() and second_object_data.is_inside() ) if ( self.is_inside() and second_object_is_close_and_inside and (not self.containerize_with(second_object_data)) ): pass if self.is_inside() or second_object_is_close_and_inside: return container_data_list[0].larger_definition() if second_object_data and second_object_data.is_close(): return identify_larger_definition( self.larger_definition(), second_object_data.larger_definition() ) return self.larger_definition()
python
10
0.580791
70
39.272727
22
Return the larger (in dimensions) of this object's, the given second object's, or this object's container's trained or untrained definition. Useful if you need to reserve space for the object, since we assume the object's receptacle will always be larger than itself.
function
func (s *SmartContract) WithdrawProposal(ctx contractapi.TransactionContextInterface, proposalID string) error { if proposalID == "" { return fmt.Errorf("the required parameter 'proposalID' is empty") } proposal, err := s.GetProposal(ctx, proposalID) if err != nil { return ErrProposalNotFound } if proposal.Status != Proposed { return fmt.Errorf("the voting is already closed") } mspID, err := s.getMSPID(ctx) if err != nil { return fmt.Errorf("failed to get MSP ID: %v", err) } if proposal.Creator != mspID { return fmt.Errorf("only the proposer (%v) can withdraw the proposal", proposal.Creator) } if err = s.updateStatusToWithdrawn(ctx, *proposal); err != nil { return fmt.Errorf("failed to update the status: %v", err) } return nil }
go
10
0.708497
112
32.304348
23
// WithdrawProposal withdraws the chaincode update proposal. // This only accepts the request from the proposing organization. // This function is only available before the decision of the proposal. // // Arguments: // 0: proposalID - the ID for the chaincode update proposal // // Returns: // 0: error // // Events: // name: withdrawnEvent(<proposalID>) // payload: nil //
function
func (s *Service) GetManagementPort(instanceStatus *InstanceStatus) (*ports.Port, error) { ns, err := instanceStatus.NetworkStatus() if err != nil { return nil, err } mc := metrics.NewMetricPrometheusContext("port", "list") portOpts := ports.ListOpts{ DeviceID: instanceStatus.ID(), FixedIPs: []ports.FixedIPOpts{ { IPAddress: ns.IP(), }, }, Limit: 1, } allPages, err := ports.List(s.networkClient, portOpts).AllPages() if mc.ObserveRequest(err) != nil { return nil, fmt.Errorf("lookup management port for server %s: %w", instanceStatus.ID(), err) } allPorts, err := ports.ExtractPorts(allPages) if err != nil { return nil, err } if len(allPorts) < 1 { return nil, fmt.Errorf("did not find management port for server %s", instanceStatus.ID()) } return &allPorts[0], nil }
go
17
0.679803
94
28.035714
28
// GetManagementPort returns the port which is used for management and external // traffic. Cluster floating IPs must be associated with this port.
function
public void init(Logger rootLogger) { Formatter fastFormatter = new FastFormatter(); for (Handler handler : rootLogger.getHandlers()) { rootLogger.removeHandler(handler); for (String name : RUNTIME_LOGGERS) { Logger logger = Logger.getLogger(name); logger.addHandler(handler); runtimeLoggers.add(logger); } handler.setFormatter(fastFormatter); } for (String name : RUNTIME_LOGGERS) { Logger logger = Logger.getLogger(name); logger.setUseParentHandlers(false); } rootLogger.addHandler(this); }
java
11
0.66609
54
33.058824
17
/** * Initialize the {@code LogHandler} by installing it on the root logger. After this call, log * messages specific to the runtime will be filtered out from being sent to the customer. */
function
def architecture_params_exist(architecture_name, rnn_name, run): if file_exists(neural_net_params_dir+'/{}_{}_{}.json.gz'.format(rnn_name, architecture_name, run)): with open(training_costs_dir+'/{}_{}_{}.txt'.format(rnn_name, architecture_name, run), 'r', encoding='utf-8') as f: validation_costs = [ float(validation_cost) for (epoch, training_cost, validation_cost) in [ line.split('\t') for line in f.read().strip().split('\n')[1:] ] ] if len(validation_costs) == max_epochs + 1: return True final_validation_costs = validation_costs[-(early_stop_patience+1):] if len(final_validation_costs) == early_stop_patience+1: return all(final_validation_costs[0] < other_cost for other_cost in final_validation_costs[1:]) else: return False else: return False
python
20
0.629802
170
65.153846
13
Check if a fully trained parameters file exists for a requested architecture. If the file exists, this function will also check the training costs file to verify that a termination criterion has been reached by early stopping.
function
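The early-stopping test above asks whether the first cost in the final window is lower than every later one, i.e. the model has not improved for `early_stop_patience` epochs. That criterion in isolation, as a small Python sketch:

def early_stop_reached(validation_costs, patience):
    # Look at the last patience+1 costs; stop if the oldest one in the
    # window is strictly better than every cost that followed it.
    window = validation_costs[-(patience + 1):]
    if len(window) < patience + 1:
        return False
    return all(window[0] < later for later in window[1:])

print(early_stop_reached([0.9, 0.5, 0.40, 0.41, 0.42, 0.43], patience=3))  # True
print(early_stop_reached([0.9, 0.5, 0.40, 0.39, 0.42, 0.43], patience=3))  # False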
public bool Contains(Key key) { EnsureOwnerIsNotRemoved(); if (key==null || !Field.ItemType.IsAssignableFrom(key.TypeInfo.UnderlyingType)) { return false; } return Session.LookupStateInCache(key, out var entityState) ? Contains(key, entityState.TryGetEntity()) : Contains(key, null); }
c#
12
0.648094
87
33.2
10
/// <summary> /// Determines whether <see cref="EntitySetBase"/> contains the specified <see cref="Key"/>. /// </summary> /// <param name="key">The key.</param> /// <returns> /// <see langword="true"/> if <see cref="EntitySetBase"/> contains the specified <see cref="Key"/>; otherwise, <see langword="false"/>. /// </returns> /// <exception cref="InvalidOperationException">Entity type is not supported.</exception>
function
public void scrollTo(Figure figure) { if (figure == null) { return; } if (fFocusVehicle == null) { fFocusFigure = figure; } scrollRectToVisible(computeVisibleRectangleForFigure(figure)); repaint(); }
java
8
0.628692
66
22.8
10
/** * Scrolls to the given figure. Normally called when the user clicks on * a model component in the TreeView and wants to see the corresponding * figure. * * @param figure The figure to be scrolled to. */
function
def _fix_up_media_upload(method_desc, root_desc, path_url, parameters): media_upload = method_desc.get('mediaUpload', {}) accept = media_upload.get('accept', []) max_size = _media_size_to_long(media_upload.get('maxSize', '')) media_path_url = None if media_upload: media_path_url = _media_path_url_from_info(root_desc, path_url) parameters['media_body'] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy() if 'body' in parameters: parameters['body']['required'] = False return accept, max_size, media_path_url
python
12
0.683616
72
47.363636
11
Updates parameters of API by adding 'media_body' if supported by method. SIDE EFFECTS: If the method supports media upload and has a required body, sets body to be optional (required=False) instead. Also, if there is a 'mediaUpload' in the method description, adds 'media_upload' key to parameters. Args: method_desc: Dictionary with metadata describing an API method. Value comes from the dictionary of methods stored in the 'methods' key in the deserialized discovery document. root_desc: Dictionary; the entire original deserialized discovery document. path_url: String; the relative URL for the API method. Relative to the API root, which is specified in the discovery document. parameters: A dictionary describing method parameters for method described in method_desc. Returns: Triple (accept, max_size, media_path_url) where: - accept is a list of strings representing what content types are accepted for media upload. Defaults to empty list if not in the discovery document. - max_size is a long representing the max size in bytes allowed for a media upload. Defaults to 0L if not in the discovery document. - media_path_url is a String; the absolute URI for media upload for the API method. Constructed using the API root URI and service path from the discovery document and the relative path for the API method. If media upload is not supported, this is None.
function
[Info(UserLevel.User, "When another node enters this radius, it is displaced in the direction of the angle without affecting its velocity.", CompType)] public class Displace : Component, ILinkable, IMultipliable { public const mtypes CompType = mtypes.affectother; public override mtypes compType { get { return CompType; } set { } } [Info(UserLevel.User, "Radius at which other nodes are affected.")] public float radius { get; set; } [Info(UserLevel.Advanced, "Represents minimum distance taken into account when calculating push away.")] public int lowerbound { get; set; } [Info(UserLevel.User, "Changes the angle at which the node displaces the incoming node: 0 pushes away and 180 pulls toward. 90 pushes rightwards and 270 pushes leftwards." )] public int angle { get; set; } [Info(UserLevel.Advanced, "If disabled, the intensity of displacement will vary depending on the distance from the node.")] public bool ConstantPush { get; set; } public Displace() : this(null) {} public Displace(Node parent) { if (parent != null) this.parent = parent; multiplier = 100f; lowerbound = 20; radius = 800f; } [Info(UserLevel.Developer)] public Link link { get; set; } public override void AffectOther(Node other) { if (!active) { return; } if (exclusions.Contains(other)) return; float distVects = Vector2R.Distance(other.body.pos, parent.body.pos); if (distVects < radius) { if (distVects < lowerbound) distVects = lowerbound; double aa = Math.Atan2((parent.body.pos.Y - other.body.pos.Y), (parent.body.pos.X - other.body.pos.X)); float gravForce; if (!ConstantPush) gravForce = multiplier/10f; else gravForce = (multiplier/10f*parent.body.mass*other.body.mass)/(distVects); if (angle != 0) aa = (aa + Math.PI + (Math.PI*(float) (angle/180.0f))%(Math.PI*2)) - Math.PI; float velX = (float) Math.Cos(aa)*gravForce; float velY = (float) Math.Sin(aa)*gravForce; Vector2R delta = new Vector2R(velX, velY); if (!ConstantPush) delta *= other.body.invmass; other.body.pos -= delta; } } [Info(UserLevel.User, "The strength with which the other node will be moved away.")] public float multiplier { get; set; } }
c#
19
0.646887
155
43.333333
54
/// <summary> /// When another node enters this radius, it is displaced in the direction of the angle without affecting its velocity. /// </summary>
class
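The component above pushes a neighbouring node along a direction offset from the away-from-node angle: 0 pushes away, 180 pulls toward. A stripped-down Python sketch of just that angle convention (the mass scaling, distance falloff and radius check from the original are omitted):

import math

def displace(node_xy, other_xy, strength, angle_deg=0.0):
    # Direction pointing from the node toward the other body...
    away = math.atan2(other_xy[1] - node_xy[1], other_xy[0] - node_xy[0])
    # ...rotated by the configured angle, then applied with a fixed strength.
    direction = away + math.radians(angle_deg)
    return (other_xy[0] + strength * math.cos(direction),
            other_xy[1] + strength * math.sin(direction))

print(displace((0.0, 0.0), (1.0, 0.0), 0.5))        # pushed out to (1.5, 0.0)
print(displace((0.0, 0.0), (1.0, 0.0), 0.5, 180))   # pulled in to roughly (0.5, 0.0)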
public string GetStringValue(string key) { foreach(ISN_CKRecordField field in this.Fields) { if (field.Key.Equals(key)) { return field.StringValue; } } return string.Empty; }
c#
10
0.492593
61
32.875
8
/// <summary> /// Method for getting the string value for a given key. /// If we don't have a value for the given key, this method will return an empty string. /// </summary> /// <param name="key"> Key for this field. </param>
function
public void sourceReads() { this.initialize(); SAMFileReader reader = new SAMFileReader(file); reader.setValidationStringency(ValidationStringency.SILENT); CloseableIterator<SAMRecord> iter = reader.iterator(); Collection<SAMRecord> byRead = new ArrayList<SAMRecord>(); String lastread = null; while (iter.hasNext()) { SAMRecord record = iter.next(); if (record.getReadUnmappedFlag()) {continue; } if (lastread == null || !lastread.equals(record.getReadName())) { processRead(byRead); byRead.clear(); } lastread = record.getReadName(); byRead.add(record); } processRead(byRead); iter.close(); reader.close(); }
java
13
0.672012
71
31.714286
21
/** * Get the reads from the appropriate source (implementation-specific). * Loads data to the fivePrimesList and hitsCountList */
function
public static string RepeatedWord(string inputString) { string temp = ""; HashTable cashTable = new HashTable(); int i = 0; while (i < inputString.Length) { while (i < inputString.Length && (inputString[i] > 'z' || inputString[i] < 'A' || (inputString[i] > 'Z' && inputString[i] < 'a'))) { i++; } while (i < inputString.Length && ((inputString[i] <= 'z' && inputString[i] >= 'a') || (inputString[i] <= 'Z' && inputString[i] >= 'A'))) { temp += inputString[i]; i++; } if (cashTable.Contains(temp)) { return temp; } else { if (temp.Length > 0) cashTable.Add(temp.ToLower(), 1); temp = ""; } if (i == inputString.Length) { i++; } } return null; }
c#
16
0.346596
152
34.375
32
/// <summary> /// Takes in an input string, and returns the first repeated word in the string. /// </summary> /// <param name="inputString"></param> /// <returns> First repeated word </returns>
function
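The C# method above scans letter runs and reports the first word it has already seen. The same idea in a few lines of Python, using a set instead of the custom HashTable (illustrative only):

import re

def first_repeated_word(text):
    seen = set()
    for word in re.findall(r"[A-Za-z]+", text):
        key = word.lower()      # compare case-insensitively
        if key in seen:
            return word
        seen.add(key)
    return None

print(first_repeated_word("He had had quite enough"))  # 'had'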
int ImDiskCliChangeFlags(DWORD DeviceNumber, LPCWSTR MountPoint, DWORD FlagsToChange, DWORD Flags) { HANDLE device; DWORD dw; IMDISK_SET_DEVICE_FLAGS device_flags; if (MountPoint == NULL) { device = ImDiskOpenDeviceByNumber(DeviceNumber, GENERIC_READ | GENERIC_WRITE); if (device == INVALID_HANDLE_VALUE) device = ImDiskOpenDeviceByNumber(DeviceNumber, GENERIC_READ); } else { device = ImDiskOpenDeviceByMountPoint(MountPoint, GENERIC_READ | GENERIC_WRITE); if (device == INVALID_HANDLE_VALUE) device = ImDiskOpenDeviceByMountPoint(MountPoint, GENERIC_READ); if (device == INVALID_HANDLE_VALUE) switch (GetLastError()) { case ERROR_INVALID_PARAMETER: fputs("This version of Windows only supports drive letters as " "mount points.\r\n" "Windows 2000 or higher is required to support " "subdirectory mount points.\r\n", stderr); return IMDISK_CLI_ERROR_BAD_MOUNT_POINT; case ERROR_INVALID_FUNCTION: fputs("Mount points are only supported on NTFS volumes.\r\n", stderr); return IMDISK_CLI_ERROR_BAD_MOUNT_POINT; case ERROR_NOT_A_REPARSE_POINT: case ERROR_DIRECTORY: case ERROR_DIR_NOT_EMPTY: ImDiskOemPrintF(stderr, "Not a mount point: '%1!ws!'\n", MountPoint); return IMDISK_CLI_ERROR_BAD_MOUNT_POINT; default: PrintLastError(MountPoint); return IMDISK_CLI_ERROR_BAD_MOUNT_POINT; } } if (device == INVALID_HANDLE_VALUE) if (GetLastError() == ERROR_FILE_NOT_FOUND) { fputs("No such device.\r\n", stderr); return IMDISK_CLI_ERROR_DEVICE_NOT_FOUND; } else { PrintLastError(L"Error opening device:"); return IMDISK_CLI_ERROR_DEVICE_INACCESSIBLE; } if (!ImDiskCliCheckDriverVersion(device)) { CloseHandle(device); return IMDISK_CLI_ERROR_DRIVER_WRONG_VERSION; } if (FlagsToChange & (IMDISK_OPTION_RO | IMDISK_OPTION_REMOVABLE)) { puts("Flushing file buffers..."); FlushFileBuffers(device); puts("Locking volume..."); if (!DeviceIoControl(device, FSCTL_LOCK_VOLUME, NULL, 0, NULL, 0, &dw, NULL)) { PrintLastError(MountPoint == NULL ? L"Error" : MountPoint); CloseHandle(device); return IMDISK_CLI_ERROR_DEVICE_INACCESSIBLE; } puts("Dismounting filesystem..."); if (!DeviceIoControl(device, FSCTL_DISMOUNT_VOLUME, NULL, 0, NULL, 0, &dw, NULL)) { PrintLastError(MountPoint == NULL ? L"Error" : MountPoint); CloseHandle(device); return IMDISK_CLI_ERROR_DEVICE_INACCESSIBLE; } } puts("Setting new flags..."); device_flags.FlagsToChange = FlagsToChange; device_flags.FlagValues = Flags; if (!DeviceIoControl(device, IOCTL_IMDISK_SET_DEVICE_FLAGS, &device_flags, sizeof(device_flags), &device_flags, sizeof(device_flags), &dw, NULL)) PrintLastError(MountPoint); if (device_flags.FlagsToChange != 0) { CloseHandle(device); ImDiskOemPrintF(stderr, "%1!ws!: Not all new options were successfully changed.", MountPoint); return IMDISK_CLI_ERROR_CREATE_DEVICE; } else { CloseHandle(device); puts("Done."); return 0; } }
c
15
0.542813
79
31.172131
122
// Changes flags for an existing virtual disk, identified by either device // number or mount point. FlagsToChange specifies which flag bits to change, // (0=not touch, 1=set to corresponding bit value in Flags parameter).
function
UPTR StripComments(char* pStr, const char* pSingleLineComment, const char* pMultiLineCommentStart, const char* pMultiLineCommentEnd) { UPTR Len = strlen(pStr); if (pMultiLineCommentStart && pMultiLineCommentEnd) { UPTR MLCSLen = strlen(pMultiLineCommentStart); UPTR MLCELen = strlen(pMultiLineCommentEnd); char* pFound; while (pFound = strstr(pStr, pMultiLineCommentStart)) { char* pEnd = strstr(pFound + MLCSLen, pMultiLineCommentEnd); if (pEnd) { const char* pFirstValid = pEnd + MLCELen; *pFound = ' '; ++pFound; memmove(pFound, pFirstValid, Len - (pFirstValid - pStr)); Len -= (pFirstValid - pFound); pStr[Len] = 0; } else { *pFound = 0; Len = pFound - pStr; } } } if (pSingleLineComment) { UPTR SLCLen = strlen(pSingleLineComment); char* pFound; while (pFound = strstr(pStr, pSingleLineComment)) { char* pEnd = strpbrk(pFound + SLCLen, "\n\r"); if (pEnd) { const char* pFirstValid = pEnd + 1; memmove(pFound, pFirstValid, Len - (pFirstValid - pStr)); Len -= (pFirstValid - pFound); pStr[Len] = 0; } else { *pFound = 0; Len = pFound - pStr; } } } return Len; }
c++
15
0.635906
132
22.86
50
//!!!non-optimal, can rewrite in a reverse order to minimize memmove sizes! // Adds space for each multiline comment stripped to preserve token delimiting in a "name1/*comment*/name2" case
function
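The C++ routine above removes block comments in place, substituting a space so adjacent tokens stay separated, and then removes line comments. A compact regex-based Python sketch of the same stripping; like the original, it does not treat comment markers inside string literals specially:

import re

def strip_comments(src, line="//", block=("/*", "*/")):
    # Replace each block comment with a single space, as the original does,
    # so "name1/*comment*/name2" does not collapse into one token.
    src = re.sub(re.escape(block[0]) + r".*?" + re.escape(block[1]), " ", src, flags=re.S)
    # Drop line comments up to (but not including) the newline.
    return re.sub(re.escape(line) + r"[^\n]*", "", src)

print(strip_comments("a = 1; /* note */ b = 2; // trailing"))
# prints the source with both comments removed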
private void notification (JSONArray args, CallbackContext command) { int id = args.optInt(0); Options opts = getNotMgr().getOptions(id); if (opts != null) { command.success(opts.getDict()); } else { command.success(); } }
java
10
0.537162
69
32
9
/** * Options from local notification. * * @param args The exec() arguments in JSON form. * @param command The callback context used when calling back into * JavaScript. */
function
def batch_to_vectors(x): if x.ndim == 2: v = x n_rdm = x.shape[0] n_cond = _get_n_from_reduced_vectors(x) elif x.ndim == 3: m = x n_rdm = x.shape[0] n_cond = x.shape[1] v = np.ndarray((n_rdm, int(n_cond * (n_cond - 1) / 2))) for idx in np.arange(n_rdm): v[idx, :] = squareform(m[idx, :, :], checks=False) elif x.ndim == 1: v = np.array([x]) n_rdm = 1 n_cond = _get_n_from_reduced_vectors(v) return v, n_rdm, n_cond
python
17
0.467925
63
30.235294
17
converts a *stack* of RDMs in vector or matrix form into vector form Args: x: stack of RDMs Returns: tuple: **v** (np.ndarray): 2D, vector form of the stack of RDMs **n_rdm** (int): number of rdms **n_cond** (int): number of conditions
function
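The function above converts a stack of representational dissimilarity matrices to their condensed vector form with scipy's squareform. A single-matrix round trip, assuming numpy and scipy are available, shows what that conversion does:

import numpy as np
from scipy.spatial.distance import squareform

# A 3x3 symmetric RDM; its vector form is the strict upper triangle (3 values).
rdm = np.array([[0.0, 1.0, 2.0],
                [1.0, 0.0, 3.0],
                [2.0, 3.0, 0.0]])
v = squareform(rdm, checks=False)
print(v)              # [1. 2. 3.]
print(squareform(v))  # back to the 3x3 matrix with a zero diagonal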
public static class IgnoreBadWindowHandler extends XBaseErrorHandler { @Override public int handleError(long display, XErrorEvent err) { if (err.get_error_code() == XConstants.BadWindow) { return 0; } return super.handleError(display, err); } // Shared instance private static IgnoreBadWindowHandler theInstance = new IgnoreBadWindowHandler(); public static IgnoreBadWindowHandler getInstance() { return theInstance; } }
java
10
0.621324
89
37.928571
14
/* * Instead of validating the window id, we simply call XGetWindowProperty, * but temporarily install this function as the error handler to ignore * the BadWindow error. */
class
int TP_CategorizeMessage (char *s, int *offset, player_info_t **plr) { int i, msglen, len; int flags; player_info_t *player; char *name; playerview_t *pv = &cl.playerview[SP]; *offset = 0; *plr = NULL; flags = TPM_UNKNOWN; msglen = strlen(s); if (!msglen) return TPM_UNKNOWN; if ((s[0] == '^' && s[1] == '[') || (s[0] == '(' && s[1] == '^' && s[2] == '[')) { char *end, *info; i = 0; for(info = s; *info; ) { if (info[0] == '^' && info[1] == ']') break; if (*info == '\\') break; if (info[0] == '^' && info[1] == '^') info+=2; else info++; } for(end = info; *end; ) { if (end[0] == '^' && end[1] == ']') { *end = 0; info = Info_ValueForKey(info, "player"); if (*info) i = atoi(info)+1; *end = '^'; break; } if (end[0] == '^' && end[1] == '^') end+=2; else end++; } if (!*end || i < 1 || i > cl.allocated_client_slots) return TPM_UNKNOWN; if (*s == '(') { if (end[2] != ')') return TPM_UNKNOWN; end+=3; } else end+=2; if (*end++ != ':') return TPM_UNKNOWN; if (*end++ != ' ') return TPM_UNKNOWN; *plr = player = &cl.players[i-1]; *offset = end - s; if (*s == '(') flags = TPM_TEAM; else { if (player->spectator) flags |= TPM_SPECTATOR; else flags |= TPM_NORMAL; } } else { for (i=0, player=cl.players ; i < cl.allocated_client_slots ; i++, player++) { name = player->name; if (!(*name)) continue; len = strlen(name); if (len+2 <= msglen && s[len] == ':' && s[len+1] == ' ' && !strncmp(name, s, len)) { if (player->spectator) flags |= TPM_SPECTATOR; else flags |= TPM_NORMAL; *offset = len + 2; *plr = player; } else if (s[0] == '(' && len+4 <= msglen && !strncmp(s+len+1, "): ", 3) && !strncmp(name, s+1, len)) { if (pv->spectator) { unsigned int track = Cam_TrackNum(pv); if (i == track || ( cl.teamplay && !strcmp(cl.players[track].team, player->team)) ) { flags |= TPM_OBSERVEDTEAM; } } else { if (i == pv->playernum || ( cl.teamplay && !strcmp(cl.players[pv->playernum].team, player->team)) ) { flags |= TPM_TEAM; } } *offset = len + 4; *plr = player; } } } if (!flags) { char *qtv = NULL; if (!strncmp(s, "#0:qtv_say:#", 12)) { qtv = s+11; flags = TPM_QTV|TPM_SPECTATOR; } else if (!strncmp(s, "#0:qtv_say_game:#", 17)) { qtv = s+16; flags = TPM_QTV|TPM_SPECTATOR; } else if (!strncmp(s, "#0:qtv_say_team_game:#", 22)) { qtv = s+21; flags = TPM_QTV|TPM_TEAM|TPM_SPECTATOR; } if (flags) { *offset = (qtv - s); for (;;) { char *sub = qtv; if (*sub == '#') { strtoul(sub+1, &sub, 10); if (*sub++ == ':') { qtv = strstr(sub, ": "); if (qtv) { *offset = (sub - s); qtv += 2; continue; } } } break; } } } return flags; }
c
26
0.45509
81
18.031646
158
/* ====================== TP_CategorizeMessage returns a combination of these values: 0 -- unknown 1 -- normal 2 -- team message 4 -- spectator 16 -- faked or serverside Note that sometimes we can't be sure who really sent the message, e.g. when there's a player "unnamed" in your team and "(unnamed)" in the enemy team. The result will be 3 (1+2) Never returns 2 if we are a spectator. Now additionally returns player info (NULL if no player detected) ====================== */
function
private class MessagePanel extends BorderPane { final private Label lblMessage = new Label(); final private ReadOnlyProperty< Exception > lastException; public MessagePanel( ReadOnlyProperty< Exception > lastException ) { this.setCenter( lblMessage ); this.lastException = lastException; this.lastException.addListener( new ChangeListener< Exception >() { @Override public void changed(ObservableValue<? extends Exception> observable, Exception oldValue, Exception newValue) { setMessage( newValue == null? "" : newValue.getMessage() ); } }); } public void setMessage( String message ) { this.lblMessage.setText( message ); } }
java
18
0.603448
126
35.954545
22
/** * Panel for displaying any messages that may be received from the data * source. */
class
private static void printlnEyecatcherMsg(String msgType, String msg) { if (accumulatedMsgLists == null) { Report.initLocalStore(); } Report.accumulatedMsgLists.get(msgType).add(msg); if (!lastOutputNewline) { Report.println(); } incrementIndentation(); Report.printlnIndent("**** " + msgType + " **** " + msg + " **"); decrementIndentation(); }
java
11
0.6703
70
29.666667
12
/** * Output a standardized "eye-catcher" message to the appropriate stream. * @param msgType Type of message (e.g. FAILURE). * @param msg Message to use. */
function
public class WorkflowAccesor { private final SrampAtomApiClient _client; /** * Instantiates a new workflow accesor. */ public WorkflowAccesor() { _client = SrampAtomApiClientFactory.createAtomApiClient(); } public enum WorkflowStatusEnum implements Serializable { CREATED, RUNNING, ABORTED, COMPLETED } /** * Converts the values passed as params in sramp workflow artifact. * * @param uuid * @param targetUUID * @param targetName * @param workflow * @param processInstanceId * @param status * @param parameters * @return the base artifact type */ private BaseArtifactType toWorkflowArtifact(String uuid, String targetUUID, String targetName, String workflow, WorkflowStatusEnum status, Map<String, String> parameters) { ExtendedArtifactType artifact = new ExtendedArtifactType(); // Set the UUID so that we only ever create one of these artifact.setUuid(uuid); artifact.setArtifactType(BaseArtifactEnum.EXTENDED_ARTIFACT_TYPE); artifact.setExtendedType(DtgovModel.WorkflowInstanceType); artifact.setName(buildArtifactName(targetName)); SrampModelUtils.setCustomProperty(artifact, DtgovModel.CUSTOM_PROPERTY_WORKFLOW, workflow); SrampModelUtils.setCustomProperty(artifact, DtgovModel.CUSTOM_PROPERTY_STATUS, status.name()); SrampModelUtils.setCustomProperty(artifact, DtgovModel.CUSTOM_PROPERTY_ARTIFACT_ID, targetUUID); SrampModelUtils.setCustomProperty(artifact, DtgovModel.CUSTOM_PROPERTY_ARTIFACT_NAME, targetName); if (parameters != null && parameters.size() > 0) { SrampModelUtils.setCustomProperty(artifact, DtgovModel.CUSTOM_PROPERTY_NUM_PARAMS, parameters.size() + ""); //$NON-NLS-1$ for (String param_key : parameters.keySet()) { SrampModelUtils.setCustomProperty(artifact, DtgovModel.CUSTOM_PROPERTY_PARAM_PREFIX + param_key, parameters.get(param_key)); } } SrampModelUtils.addGenericRelationship(artifact, DtgovModel.RELATIONSHIP_ARTIFACT_GOVERNED, targetUUID); return artifact; } /** * Save a new workflow type artifact. * * @param workflowUUID * @param targetUUID * @param targetName * @param workflow * @param processInstanceId * @param parameters * @throws SrampClientException * @throws SrampAtomException */ public BaseArtifactType save(String workflowUUID, String targetUUID, String targetName, String workflow, Map<String, String> parameters) throws SrampClientException, SrampAtomException { BaseArtifactType artifact = toWorkflowArtifact(workflowUUID, targetUUID, targetName, workflow, WorkflowStatusEnum.CREATED, parameters); return _client.createArtifact(artifact); } /** * Updates the workflow artifact with the process instance ID of the process instance * we just created. * @param artifact * @param processInstanceId * @throws SrampAtomException * @throws SrampClientException */ public void update(BaseArtifactType artifact, long processInstanceId) throws SrampClientException, SrampAtomException { SrampModelUtils.setCustomProperty(artifact, DtgovModel.CUSTOM_PROPERTY_STATUS, WorkflowStatusEnum.RUNNING.name()); SrampModelUtils.setCustomProperty(artifact, DtgovModel.CUSTOM_PROPERTY_PROCESS_ID, String.valueOf(processInstanceId)); _client.updateArtifactMetaData(artifact); } /** * Gets the process ids. 
* * @param targetUUID * the target uuid * @return the process ids * @throws SrampClientException * the sramp client exception * @throws SrampAtomException * the sramp atom exception */ public List<Long> getProcessIds(String targetUUID) throws SrampClientException, SrampAtomException { List<Long> processes = new ArrayList<Long>(); String query = "/s-ramp/ext/" + DtgovModel.WorkflowInstanceType; //$NON-NLS-1$ query += "[@" + DtgovModel.CUSTOM_PROPERTY_ARTIFACT_ID + "= ?]"; //$NON-NLS-1$ //$NON-NLS-2$ SrampClientQuery queryClient = _client.buildQuery(query); queryClient = queryClient.propertyName(DtgovModel.CUSTOM_PROPERTY_PROCESS_ID); queryClient=queryClient.parameter(targetUUID); QueryResultSet resultSet = queryClient.query(); for (ArtifactSummary summary : resultSet) { String processId = summary.getCustomPropertyValue(DtgovModel.CUSTOM_PROPERTY_PROCESS_ID); processes.add(new Long(processId)); } return processes; } /** * Exist running workflow. * * @param targetUUID * the target uuid * @param targetName * the target name * @param workflow * the workflow * @param processInstanceId * the process instance id * @param parameters * the parameters * @return true, if successful * @throws SrampClientException * the sramp client exception * @throws SrampAtomException * the sramp atom exception */ public boolean existRunningWorkflow(String targetUUID, String targetName, String workflow, String processInstanceId, Map<String, String> parameters) throws SrampClientException, SrampAtomException { SrampClientQuery query = buildQuery(targetUUID, targetName, workflow, processInstanceId, null, parameters); query = query.startIndex(0); QueryResultSet resultSet = query.query(); if (resultSet.size() > 0) { return true; } return false; } /** * Builds the artifact name. * * @param targetName * the target name * @return the string */ private String buildArtifactName(String targetName) { return targetName + "_workflow"; //$NON-NLS-1$ } /** * Builds the query. 
* * @param targetUUID * the target uuid * @param targetName * the target name * @param workflow * the workflow * @param processInstanceId * the process instance id * @param status * the status * @param parameters * the parameters * @return the sramp client query */ private SrampClientQuery buildQuery(String targetUUID, String targetName, String workflow, String processInstanceId, WorkflowStatusEnum status, Map<String, String> parameters) { StringBuilder queryBuilder = new StringBuilder(); // Initial query queryBuilder.append("/s-ramp/ext/" + DtgovModel.WorkflowInstanceType); //$NON-NLS-1$ List<String> criteria = new ArrayList<String>(); List<Object> params = new ArrayList<Object>(); criteria.add("fn:matches(@name, ?)"); //$NON-NLS-1$ params.add(buildArtifactName(targetName)); criteria.add("@" + DtgovModel.CUSTOM_PROPERTY_ARTIFACT_ID + "= ?"); //$NON-NLS-1$ //$NON-NLS-2$ params.add(targetUUID); criteria.add("@" + DtgovModel.CUSTOM_PROPERTY_ARTIFACT_NAME + "= ?"); //$NON-NLS-1$ //$NON-NLS-2$ params.add(targetName); if (StringUtils.isNotBlank(workflow)) { criteria.add("@" + DtgovModel.CUSTOM_PROPERTY_WORKFLOW + "= ?"); //$NON-NLS-1$ //$NON-NLS-2$ params.add(workflow); } if (StringUtils.isNotBlank(processInstanceId)) { criteria.add("@" + DtgovModel.CUSTOM_PROPERTY_PROCESS_ID + "= ?"); //$NON-NLS-1$ //$NON-NLS-2$ params.add(processInstanceId); } if (status != null) { criteria.add("@" + DtgovModel.CUSTOM_PROPERTY_STATUS + "= ?"); //$NON-NLS-1$ //$NON-NLS-2$ params.add(status.name()); } if (parameters != null && parameters.size() > 0) { criteria.add("@" + DtgovModel.CUSTOM_PROPERTY_NUM_PARAMS + "= ?"); //$NON-NLS-1$ //$NON-NLS-2$ params.add(parameters.size() + ""); //$NON-NLS-1$ for (String param_key : parameters.keySet()) { criteria.add("@" + DtgovModel.CUSTOM_PROPERTY_PARAM_PREFIX+param_key + "= ?"); //$NON-NLS-1$ //$NON-NLS-2$ params.add(parameters.get(param_key)); } } // Now create the query predicate from the generated criteria if (criteria.size() > 0) { queryBuilder.append("["); //$NON-NLS-1$ queryBuilder.append(StringUtils.join(criteria, " and ")); //$NON-NLS-1$ queryBuilder.append("]"); //$NON-NLS-1$ } SrampClientQuery query = _client.buildQuery(queryBuilder.toString()); for (Object param : params) { if (param instanceof String) { query.parameter((String) param); } if (param instanceof Calendar) { query.parameter((Calendar) param); } } return query; } /** * Gets the client. * * @return the client */ public SrampAtomApiClient getClient() { return _client; } }
java
16
0.625458
147
38.858369
233
/** * Class with the necessary methods to access the s-ramp workflow artifacts * * @author David Virgil Naranjo */
class
def LearnToExecute( batch_size, max_length=1, max_nesting=1, token_by_char=True, mode=Mode.TRAIN_COMBINE, loss_threshold=0.1, min_tries=DEFAULT_MIN_CURRICULUM_EVAL_TRIES, task_type=TaskType.ALG_CTRL): if mode == Mode.TRAIN_COMBINE: curriculum = CombineCurriculum( max_length, max_nesting, loss_threshold, min_tries=min_tries) elif mode == Mode.TRAIN_MIX: curriculum = MixCurriculum( max_length, max_nesting, loss_threshold, min_tries=min_tries) elif mode == Mode.TRAIN_NAIVE: curriculum = NaiveCurriculum( max_length, max_nesting, loss_threshold, min_tries=min_tries) elif mode == Mode.TEST: curriculum = BaselineCurriculum( max_length, max_nesting, loss_threshold, min_tries=0) else: raise ValueError("Invalid mode.") lte = LearnToExecuteState(batch_size, max_length, max_nesting, curriculum, token_by_char, task_type=task_type) types_ = (tf.float32, tf.float32, tf.float32, tf.int64, tf.int64) shapes_ = (tf.TensorShape([lte.num_steps, batch_size, lte.vocab_size]), tf.TensorShape([lte.num_steps_out, batch_size, lte.vocab_size]), tf.TensorShape([lte.num_steps_out, batch_size, lte.vocab_size]), tf.TensorShape([batch_size,]), tf.TensorShape([batch_size,])) dataset = tf.data.Dataset.from_generator(lte.make_batch, types_, shapes_) dataset.state = lte return dataset
python
11
0.670146
78
48.586207
29
Factory method for LearnToExecute Dataset module. Args: batch_size: (int). The number of elements in a mini-batch. max_length: (int). Maximum character length. max_nesting: (int). Maximum level of statement nesting. token_by_char: (bool). Tokenize by character or words? mode: (string). Either 'train', 'test'. loss_threshold: (int) curriculum threshold for error below which increase the task difficulty. min_tries: (int) minimum update tries for curriculum difficulty level. task_type: (string) defines the task by allowable ops (see TASK_TYPE_OPS). Returns: tf.Data.Dataset for LearnToExecute sample generator with the LearnToExecuteState monkey patched into the `state` attribute. Raises: ValueError: in case of bad `mode`.
function
READ8_MEMBER( snes_ppu_device::oam_read ) { offset &= 0x1ff; if (offset & 0x100) offset &= 0x10f; if (!m_screen_disabled) { uint16_t v = screen().vpos(); if (v < m_beam.last_visible_line) offset = 0x010c; } return (m_oam_ram[offset] >> (PPU_REG(OAMDATA) << 3)) & 0xff; }
c++
11
0.607018
62
21
13
/************************************************* SNES OAM accesses: OAM accesses during active display are allowed. The actual address varies during rendering, as the PPU reads in data itself for processing. Unfortunately, no one has been able (yet) to determine how this works. The only known game to actually access OAM during active display is Uniracers and it expects accesses to map to offset 0x0218. Hence, following byuu's choice we rerouted OAM accesses during active display to 0x0218 (0x010c in our snes_oam). This is a hack, but it is more accurate than writing to the 'expected' address set by $2102,$2103. Notice that, since PPU_REG(OAMDATA) is never read/written directly, we use it as an index to choose the high/low byte of the snes_oam word. *************************************************/
function
prng(const size_t n, const std::vector<int_type>& seed, const bool deterministic = false) { rng_state s; s.deterministic = deterministic; constexpr size_t len = rng_state::size(); auto n_seed = seed.size() / len; for (size_t i = 0; i < n; ++i) { if (i < n_seed) { std::copy_n(seed.begin() + i * len, len, std::begin(s.state)); } else { dust::random::jump(s); } state_.push_back(s); } }
c++
13
0.532751
70
29.6
15
/// Construct a new `prng` object from a vector of seed data. We /// will consume as many items of `seed` as possible, then start /// jumping /// /// @param seed A vector of integers to seed the generator with
function
func Run(in context.Context, s Spec) error { ctx, cancel := context.WithCancel(in) defer cancel() defer s.cstore.Flush(context.Background()) cursor, err := s.cstore.GetCursor(ctx, s.consumer.Name()) if err != nil { return errors.Wrap(err, "get cursor error") } if resetter, ok := s.consumer.(resetter); ok { err := resetter.Reset() if err != nil { return errors.Wrap(err, "reset error") } } var ( lag time.Duration opts []StreamOption ) for _, opt := range s.opts { var temp StreamOptions opt(&temp) if temp.Lag > 0 { lag = temp.Lag } else { opts = append(opts, opt) } } sc, err := s.stream(ctx, cursor, opts...) if err != nil { return err } if closer, ok := sc.(io.Closer); ok { defer closer.Close() } for { e, err := sc.Recv() if err != nil { return errors.Wrap(err, "recv error") } ctx := log.ContextWith(ctx, j.MKS{ "event_id": e.ID, "event_fid": e.ForeignID, }) if delay := lag - since(e.Timestamp); lag > 0 && delay > 0 { t := newTimer(delay) select { case <-ctx.Done(): t.Stop() return ctx.Err() case <-t.C: } } if err := s.consumer.Consume(ctx, fate.New(), e); err != nil { return errors.Wrap(err, "consume error") } if err := s.cstore.SetCursor(ctx, s.consumer.Name(), e.ID); err != nil { return errors.Wrap(err, "set cursor error") } } }
go
14
0.592375
74
21.75
60
// Run executes the spec by streaming events from the current cursor, // feeding each into the consumer and updating the cursor on success. // It always returns a non-nil error. Cancel the context to return early.
function
def to_bytes(cls, name, start_state): visited = {} for state in cls.bfs(start_state): if state.name in visited and visited[state.name] is not state: raise ValueError("Found duplicate state name {}. " "Cannot serialize a FSM with duplicate state name".format(state.name)) visited[state.name] = state pb_fsm = wca_state_machine_pb2.StateMachine( name=name, start_state=start_state.name) for (state_name, state) in list(visited.items()): pb_fsm.states.extend([state.to_desc()]) return pb_fsm.SerializeToString()
python
14
0.592248
103
52.833333
12
Serialize a FSM to bytes. States in the FSM are discovered using a
breadth-first search (see the bfs method in this class).

Args:
    name (string): The name of the FSM.
    start_state (State): The start state of the FSM.

Raises:
    ValueError: raised when there are duplicate state names.

Returns:
    bytes: Serialized FSM in bytes. Format is defined in
    wca_state_machine.proto.
function
class PlayfairCipher {
private:
    char key[5][5];
    map<char, pair<int, int> > keymap;

public:
    PlayfairCipher(const string s) {
        int c, k, i = 0, j = 0, n = s.size();
        bool a[26] = {0};
        key[0][0] = s[0];
        a[s[0] - 65] = 1;
        for (k = 1; k < n; k++) {
            c = s[k] - 65;
            if (!a[c]) {
                j++;
                if (j >= 5) {
                    j = 0;
                    i++;
                }
                key[i][j] = s[k];
                a[c] = 1;
                if (c == 8 || c == 9)
                    a[8] = a[9] = 1;
            }
        }
        for (c = 0; c < 26; c++) {
            if (!a[c]) {
                j++;
                if (j >= 5) {
                    j = 0;
                    i++;
                }
                key[i][j] = char(c + 65);
                a[c] = 1;
                if (c == 8 || c == 9)
                    a[8] = a[9] = 1;
            }
        }
        for (i = 0; i < 5; i++) {
            for (j = 0; j < 5; j++)
                keymap[key[i][j]] = make_pair(i, j);
        }
    }

    string encrypt(string s) {
        int i, row1, row2, col1, col2, n = s.size();
        string res = "";
        char prev = s[0];
        res += prev;
        for (i = 1; i < n; i++) {
            if (prev == s[i])
                res += "X";
            res += s[i];
            prev = s[i];
        }
        if (res.size() % 2)
            res += "X";
        s = res;
        res = "";
        n = s.size();
        for (i = 0; i < n; i += 2) {
            row1 = keymap[s[i]].first;
            col1 = keymap[s[i]].second;
            row2 = keymap[s[i + 1]].first;
            col2 = keymap[s[i + 1]].second;
            if (row1 == row2) {
                col1 = (col1 + 1) % 5;
                col2 = (col2 + 1) % 5;
            } else if (col1 == col2) {
                row1 = (row1 + 1) % 5;
                row2 = (row2 + 1) % 5;
            } else {
                swap(col1, col2);
            }
            res += key[row1][col1];
            res += key[row2][col2];
        }
        return res;
    }

    string decrypt(string s) {
        int i, row1, row2, col1, col2, n = s.size();
        string res = "";
        for (i = 0; i < n; i += 2) {
            row1 = keymap[s[i]].first;
            col1 = keymap[s[i]].second;
            row2 = keymap[s[i + 1]].first;
            col2 = keymap[s[i + 1]].second;
            if (row1 == row2) {
                col1 = (col1 + 4) % 5;
                col2 = (col2 + 4) % 5;
            } else if (col1 == col2) {
                row1 = (row1 + 4) % 5;
                row2 = (row2 + 4) % 5;
            } else {
                swap(col1, col2);
            }
            res += key[row1][col1];
            res += key[row2][col2];
        }
        return res;
    }
};
c++
16
0.270524
52
23.959677
124
//Playfair cipher (treating X as uncommon letter and assuming I & J are interchangeable)
class
public class VunglePlayAdCallback implements PlayAdCallback {

  private final WeakReference<VungleBannerAdapter> adapterReference;
  private final WeakReference<VungleListener> listenerReference;
  private final VungleBannerAd vungleBannerAd;

  public VunglePlayAdCallback(@NonNull VungleListener listener,
      @NonNull VungleBannerAdapter adapter, @Nullable VungleBannerAd vungleBannerAd) {
    this.listenerReference = new WeakReference<>(listener);
    this.adapterReference = new WeakReference<>(adapter);
    this.vungleBannerAd = vungleBannerAd;
  }

  @Override
  public void onAdStart(String placementID) {
    VungleListener listener = listenerReference.get();
    VungleBannerAdapter adapter = adapterReference.get();
    if (listener != null && adapter != null && adapter.isRequestPending()) {
      listener.onAdStart(placementID);
    }
  }

  @Override
  @Deprecated
  public void onAdEnd(String placementID, boolean completed, boolean isCTAClicked) {
  }

  @Override
  public void onAdEnd(String placementID) {
    VungleListener listener = listenerReference.get();
    VungleBannerAdapter adapter = adapterReference.get();
    if (listener != null && adapter != null && adapter.isRequestPending()) {
      listener.onAdEnd(placementID);
    }
  }

  @Override
  public void onAdClick(String placementID) {
    VungleListener listener = listenerReference.get();
    VungleBannerAdapter adapter = adapterReference.get();
    if (listener != null && adapter != null && adapter.isRequestPending()) {
      listener.onAdClick(placementID);
    }
  }

  @Override
  public void onAdRewarded(String placementID) {
    VungleListener listener = listenerReference.get();
    VungleBannerAdapter adapter = adapterReference.get();
    if (listener != null && adapter != null && adapter.isRequestPending()) {
      listener.onAdRewarded(placementID);
    }
  }

  @Override
  public void onAdLeftApplication(String placementID) {
    VungleListener listener = listenerReference.get();
    VungleBannerAdapter adapter = adapterReference.get();
    if (listener != null && adapter != null && adapter.isRequestPending()) {
      listener.onAdLeftApplication(placementID);
    }
  }

  @Override
  public void onError(String placementID, VungleException exception) {
    VungleManager.getInstance().removeActiveBannerAd(placementID, vungleBannerAd);
    VungleListener listener = listenerReference.get();
    VungleBannerAdapter adapter = adapterReference.get();
    if (listener != null && adapter != null && adapter.isRequestPending()) {
      listener.onAdFailedToLoad(exception.getExceptionCode());
    }
  }

  @Override
  public void onAdViewed(String placementID) {
    // No-op. To be mapped to respective adapter events in future release.
  }
}
java
12
0.731062
86
33.936709
79
/**
 * Vungle adapter implementation of {@link PlayAdCallback}. Since the Vungle SDK keeps a strong
 * mapping of ads with strong references to callbacks, this callback class must have no strong
 * references to an adapter object.
 */
class
def alloc_pf(self, vm='', number=-1, socket=-1, pflist=[]):
    ports = []
    if number != -1:
        for pci in self.unused_ports:
            if pci != 'unused' and number != 0:
                if self.__port_on_socket(pci, socket) is True:
                    self.__port_used(pci)
                    ports.append(pci)
                    number = number - 1
        if number != 0:
            print("Can't allocate the requested PF devices!!!")

    if pflist is not None:
        for pci in pflist:
            if self.__port_isused(pci) is True:
                print("Port %s has been used!!!" % pci)
            else:
                if self.__port_on_socket(pci, socket) is True:
                    self.__port_used(pci)
                    ports.append(pci)

    if vm not in self.allocated_info:
        self.allocated_info[vm] = {}

    self.allocated_info[vm]['ports'] = ports
    return ports
python
15
0.450151
66
42.217391
23
There are two options for requesting PF devices for a VM.
If number is not -1, allocate that many PF devices from the unused PFs.
If pflist is not None, allocate the listed PF devices after checking that they are free.
function
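An illustrative call sketch for the allocator above; `resource` stands for a hypothetical object exposing this method, and the PCI address is made up — nothing here is taken from the record itself.

# Request two PFs on socket 1 for a VM, letting the manager pick them:
ports = resource.alloc_pf(vm="vm0", number=2, socket=1)
# Or request specific devices by PCI address after availability checks:
ports = resource.alloc_pf(vm="vm0", socket=1, pflist=["0000:08:00.0"])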
func (t *Template) Compile() []byte {
    var template *upstream.Template
    if t.use != "" {
        logrus.Debugf("using requested %s", t.use)
        template = t.Lookup(t.use)
        if template == nil {
            logrus.Fatalf("unable to find %s", t.use)
        }
    } else {
        templates := t.Templates()
        for _, v := range templates {
            if v.Name() == "base.gohtml" || v.Name() == "root.gohtml" {
                template = v
                break
            }
        }
        if template == nil {
            template = templates[0]
            if template == nil {
                logrus.Fatalln("no template found")
            }
        }
    }

    buf := &bytes.Buffer{}
    logrus.Debugf("executing %s", template.Name())
    if err := template.Execute(buf, ""); err != nil {
        logrus.Fatalln(err)
    }
    return buf.Bytes()
}
go
13
0.592857
62
22.366667
30
// Compile runs exec on the template.
// Before you hit this stage you should really be
// running Load(), and Parse() to get ready.
function
def _publisher_save_public(self, obj):
    prev_sibling = self.get_previous_fitlered_sibling(publisher_is_draft=True, publisher_public__isnull=False)
    if not self.publisher_public_id:
        if prev_sibling:
            obj.insert_at(prev_sibling.publisher_public, position='right', commit=False)
        else:
            parent, public_parent = self.parent, None
            if parent:
                public_parent = parent.publisher_public
            if public_parent:
                obj.insert_at(public_parent, commit=False)
    else:
        prev_public_sibling = self.old_public.get_previous_fitlered_sibling()
        if not self.level == self.old_public.level or \
                not (self.level > 0 and self.parent.publisher_public == self.old_public.parent) or \
                not prev_sibling == prev_public_sibling == None or \
                (prev_sibling and prev_sibling.publisher_public_id == prev_public_sibling.id):
            if prev_sibling:
                obj.insert_at(prev_sibling.publisher_public, position="right")
            elif self.parent:
                target = self.parent.publisher_public
                obj.insert_at(target, position='first-child')
            else:
                next_sibling = self.get_next_filtered_sibling()
                if next_sibling and next_sibling.publisher_public_id:
                    obj.insert_at(next_sibling.publisher_public, position="left")
        else:
            prev_sibling = self.old_public.get_previous_fitlered_sibling()
            if prev_sibling:
                obj.insert_at(prev_sibling, position="right")
            elif self.old_public.parent:
                target = self.old_public.parent
                obj.insert_at(target, position='first-child')
            else:
                next_sibling = self.old_public.get_next_filtered_sibling()
                if next_sibling and next_sibling.publisher_public_id:
                    obj.insert_at(next_sibling, position="left")
    obj.save()
    return obj
python
19
0.555504
114
54.461538
39
Mptt specific stuff before the object can be saved, overrides original
publisher method.

Args:
    obj - public variant of `self` to be saved.
function
[RequireComponent(typeof(RectTransform))]
public class JCS_PanelChild : MonoBehaviour
{
    private RectTransform mRectTransform = null;

    [Header("** Check Variables (JCS_PanelChild) **")]

    [Tooltip("Panel root object cache.")]
    [SerializeField]
    private JCS_PanelRoot mPanelRoot = null;

    [Tooltip("Is this component the Unity defined UI component?")]
    [SerializeField]
    private bool mIsUnityDefinedUI = false;

    public JCS_PanelRoot PanelRoot { get { return this.mPanelRoot; } set { this.mPanelRoot = value; } }

    private void Awake()
    {
        this.mRectTransform = this.GetComponent<RectTransform>();

        if (mPanelRoot == null)
            mPanelRoot = this.GetComponentInParent<JCS_PanelRoot>();

        this.mIsUnityDefinedUI = IsUnityDefinedUI();

        {
            if (mPanelRoot != null)
            {
                FitPerfectSize(
                    mPanelRoot.PanelDeltaWidthRatio,
                    mPanelRoot.PanelDeltaHeightRatio);
            }

            if (!mIsUnityDefinedUI)
            {
                AddPanelChild();
            }
        }
    }

    public void FitPerfectSize(float xRatio, float yRatio)
    {
        {
            List<Transform> childs = null;

            if (!mIsUnityDefinedUI)
            {
                // This is a bit of a brute-force solution... Not sure why Unity
                // cannot clean these up in one pass during the initialization phase.
                childs = JCS_Utility.ForceDetachChildren(this.mRectTransform);
            }

            Vector3 newScale = mRectTransform.localScale;
            newScale.x = newScale.x / xRatio;
            newScale.y = newScale.y / yRatio;
            mRectTransform.localScale = newScale;

            if (!mIsUnityDefinedUI)
            {
                // NOTE(jenchieh): Reattach all the previous child.
                JCS_Utility.AttachChildren(this.mRectTransform, childs);
            }
        }

        {
            Vector3 newPosition = mRectTransform.localPosition;
            newPosition.x = newPosition.x / xRatio;
            newPosition.y = newPosition.y / yRatio;

            // set to the new position
            mRectTransform.localPosition = newPosition;
        }
    }

    /// <summary>
    /// Add all children with the same effect.
    /// </summary>
    private void AddPanelChild()
    {
        Transform tempTrans = this.transform;

        for (int index = 0; index < transform.childCount; ++index)
        {
            Transform child = tempTrans.GetChild(index);

            // Only added once.
            if (child.GetComponent<JCS_PanelChild>() != null)
                continue;

            JCS_PanelChild panelChild = child.gameObject.AddComponent<JCS_PanelChild>();
            panelChild.PanelRoot = mPanelRoot;
        }
    }

    /// <summary>
    /// UI component that we do not want to mess up with.
    /// </summary>
    /// <returns></returns>
    private bool IsUnityDefinedUI()
    {
        return (this.GetComponent<Button>() ||
                this.GetComponent<Dropdown>() ||
                this.GetComponent<Slider>() ||
                this.GetComponent<Scrollbar>() ||
                this.GetComponent<Toggle>() ||
                this.GetComponent<InputField>());
    }
}
c#
16
0.511163
107
38.204545
88
/// <summary>
/// This does the same thing as JCS_PanelRoot, but having every child
/// check for JCS_PanelRoot itself is vague. Instead, we add another,
/// differently named component and loop through the children, checking
/// whether each panel has the correct proportion and scaling. Note that
/// if this class has already been set up by another component of the
/// same type, it will not be active.
/// </summary>
class
private void ProcessToolsVersionDependentProperties()
{
    this.ReservedProperties.SetProperty(new BuildProperty(ReservedPropertyNames.binPath,
        EscapingUtilities.Escape(this.Toolset.ToolsPath), PropertyType.ReservedProperty));

    this.ReservedProperties.SetProperty(new BuildProperty(ReservedPropertyNames.toolsPath,
        EscapingUtilities.Escape(this.Toolset.ToolsPath), PropertyType.ReservedProperty));

    this.ReservedProperties.SetProperty(new BuildProperty(ReservedPropertyNames.toolsVersion,
        EscapingUtilities.Escape(ToolsVersion), PropertyType.ReservedProperty));
}
c#
17
0.696884
101
57.916667
12
/// <summary>
/// Prepares the MSBuildToolsPath and MSBuildBinPath reserved properties
/// </summary>
function
public class frolog {

    /**
     * @param args
     */
    static Process geni;
    static Process racer;

    public static CommandLineOptions tulipaOptions;
    public static Grammar tulipaGrammar;

    // public static String[] thematicRoles= {"agent","patient","instrument","source","goal","instrument"};

    public static Set<String> discourseModel = new HashSet<String>();
    public static String lastIndividual = "";
    public static String PreferedOrdering = "['colour','(some haslocation *top*)','(some hasdetail *top*)']";

    //primitive concepts are those concepts that are not defined in the tbox,
    //they are either in the game abox or asserted through action effects
    public static List<String> primitiveConcepts = Arrays.asList("drawingroom","treasury","room","wall","green","white",
            "player","alive","frog","ugly","brown","sword","small",
            "crown","silver","apple","dragon","worm",
            "red","chest","wooden","locked","unlocked","closed","open","couch","couchleg","golden","key","table","southexit","northexit",
            "hasdetail","hold","fitsin","hasexit","leadsto","hascounterpart","gone","happy","beautiful","pizza","yellow");

    public static void main(String[] args) {
        // System.load("/usr/lib/pl-5.6.54/lib/i386-linux/libjpl.so");

        initialize();

        PlayerInterface.startInterface();

        try {
            geni.destroy();
            System.out.println("Geni closed properly");
            racer.destroy();
            System.out.println("Racer closed properly");
        } catch (Exception e) {
            System.err.println(e);
            System.exit(0);
        }
    }

    public static void initialize(){
        try {
            //I am not using morphology in geni because it's not working (see geni mailing list) --morphlexicon GameScenarios/FairyTaleCastle/ObjectGrammarGeni/morph.mph
            //Alexandre Denis has this option when calling Geni, I don't know what's its purpose --rootfeat='[cat:Nom|NomPropre|Sen|Prix|Pro]'
            geni = Runtime.getRuntime().exec("NLPModules/Realization/geniserver -m GameScenarios/FairyTaleCastle/ObjectGrammarGeni/lu-small-grammar.geni -l GameScenarios/FairyTaleCastle/ObjectGrammarGeni/lexicon.geni --rootfeat='[cat:s|np]' &");
            System.out.println("Geni (my generator) started successfully");

            //racer = Runtime.getRuntime().exec("KBInterfaces/RacerInterface/RacerPro-1-9-2-beta/RacerPro -silent > racer_log.txt &");
            racer = Runtime.getRuntime().exec("KBInterfaces/RacerInterface/RacerPro-Linux32-1-9-3-Beta/RacerPro -silent &");
            System.out.println("Racer (my knowledge base manager) started successfully");

            new Query("consult('KBInterfaces/RacerInterface/racer.pl')").hasSolution();
            Query q3 = new Query("racer:load_scenario(f)");
            System.out.println(q3.hasSolution() ? "The game scenario knowledge bases were loaded" : "The game scenario failed to load");

            String[] commandline = "-g GameScenarios/FairyTaleCastle/ObjectGrammarTulipa/lu-small-grammar.xml -l GameScenarios/FairyTaleCastle/ObjectGrammarTulipa/lexicon.xml -m GameScenarios/FairyTaleCastle/ObjectGrammarTulipa/morph.xml -a s -i -n".split(" ");
            tulipaOptions = Tulipa.processCommandLine(commandline);
            tulipaGrammar = Tulipa.initializeTulipa(tulipaOptions);
            System.out.println("Tulipa (my parser) started successfully");

            new Query("consult('NLPModules/ReferenceGeneration/referenceGeneration.pl')").hasSolution();
            new Query("consult('NLPModules/ReferenceResolution/resolve.pl')").hasSolution();
            new Query("consult('NLPModules/Actions/execute.pl')").hasSolution();
            new Query("consult('NLPModules/ContentDetermination/describe.pl')").hasSolution();
            new Query("consult('GameScenarios/FairyTaleCastle/actionDatabase.pl')").hasSolution();

        } catch (Exception e) {
            System.err.println(e);
        }
    }
}
java
13
0.723364
252
48.32
75
/**
 * This is the entry point of the application
 *
 * There are several libraries that need to be loaded: gecodej, jpl
 * I've specified this as an argument to the VM in the eclipse project like this:
 * -Djava.library.path="/usr/lib/pl-5.6.54/lib/i386-linux:/usr/local/lib"
 * May be it's better to load them in the code using System.load(arg0)
 *
 * @author benottil
 */
class
public string ToString(ImageFormat desiredFormat)
{
    Contract.Requires<NullReferenceException>(InternalBitmap != null);
    desiredFormat = desiredFormat.Check(ImageFormat.Jpeg);
    using (MemoryStream Stream = new MemoryStream())
    {
        InternalBitmap.Save(Stream, desiredFormat);
        return Stream.ToArray().ToString(Base64FormattingOptions.None);
    }
}
c#
11
0.619369
79
43.5
10
/// <summary>
/// Converts a SwiftBitmap to a base64 string and returns it
/// </summary>
/// <param name="desiredFormat">Desired SwiftBitmap format (defaults to Jpeg)</param>
/// <returns>The SwiftBitmap in base64 string format</returns>
function
def streamStarted(self, rootElement):
    if rootElement.hasAttribute("version"):
        version = rootElement["version"].split(".")
        try:
            version = (int(version[0]), int(version[1]))
        except (IndexError, ValueError):
            version = (0, 0)
    else:
        version = (0, 0)

    self.xmlstream.version = min(self.xmlstream.version, version)
python
13
0.543902
69
40.1
10
Called by the XmlStream when the stream has started.

A stream is considered to have started when the start tag of the root
element has been received.

This examines C{rootElement} to see if there is a version attribute.
If absent, C{0.0} is assumed per RFC 3920. Subsequently, the minimum
of the version from the received stream header and the value stored
in L{xmlstream} is taken and put back in L{xmlstream}.

Extensions of this method can extract more information from the
stream header and perform checks on them, optionally sending stream
errors and closing the stream.
function
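A self-contained sketch of the version-negotiation rule described above: parse the received header the same way, default to (0, 0) when the attribute is absent or malformed, and keep the minimum of the two versions. The helper name and the sample values are illustrative, not part of the record.

def negotiate(local_version, header_value=None):
    # Absent or malformed version attribute -> (0, 0), mirroring the method above.
    if header_value is None:
        received = (0, 0)
    else:
        parts = header_value.split(".")
        try:
            received = (int(parts[0]), int(parts[1]))
        except (IndexError, ValueError):
            received = (0, 0)
    return min(local_version, received)

assert negotiate((1, 0), "1.0") == (1, 0)
assert negotiate((1, 0)) == (0, 0)           # no version attribute
assert negotiate((1, 0), "bogus") == (0, 0)  # malformed attribute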
def adjust_kwargs_to_engine(kwargs):
    new_kwargs = copy(kwargs)
    if 'delimiter' not in new_kwargs:
        if 'sep' in kwargs:
            new_kwargs['delimiter'] = new_kwargs['sep']
            del new_kwargs['sep']
        else:
            new_kwargs['delimiter'] = ','
    return new_kwargs
python
12
0.502994
59
36.222222
9
Makes sure that the passed kwargs match the engine, as the C engine
requires a different argument for separators.
function
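A quick usage sketch for the record above; it assumes adjust_kwargs_to_engine is importable and that its unqualified copy() is copy.copy. The example kwargs are illustrative only.

user_kwargs = {"sep": ";", "encoding": "utf-8"}
engine_kwargs = adjust_kwargs_to_engine(user_kwargs)
assert engine_kwargs == {"delimiter": ";", "encoding": "utf-8"}  # 'sep' renamed to 'delimiter'
assert user_kwargs == {"sep": ";", "encoding": "utf-8"}          # input dict left untouched
assert adjust_kwargs_to_engine({})["delimiter"] == ","           # default separator is a comma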
func (driver *Redshift) Open(dsn string) (database.Driver, error) {
    parsed, err := url.Parse(dsn)
    if err != nil {
        return nil, err
    }

    parsed.Scheme = "postgres"
    psql, err := driver.Driver.Open(parsed.String())
    if err != nil {
        return nil, err
    }

    return &Redshift{Driver: psql}, nil
}
go
10
0.659794
67
23.333333
12
// Open implements the database.Driver interface by parsing the URL, switching the scheme from "redshift" to "postgres", and delegating to the underlying PostgreSQL driver.
function
def _cluster_results_loop(self, interval_secs=60):
    try:
        last_appended = self._cluster_results_update()
        while self._end_time_secs is None:
            with self.__sleep_cv:
                self.__sleep_cv.wait(interval_secs)
            append_time_secs = self._cluster_results_update(last_appended)
            if append_time_secs is not None:
                last_appended = append_time_secs
    except BaseException as exc:
        with self._lock:
            self._status = Status.INTERNAL_ERROR
            self.__error_message = str(exc)
        log.exception("Unhandled exception occurred inside "
                      "_cluster_results_loop")
    else:
        log.debug("_cluster_results_loop exited cleanly")
    finally:
        log.debug("%s: Exiting _cluster_results_loop", self)
python
13
0.627756
70
39.631579
19
Periodically update the cluster results while the scenario is running.
function
def add_user_to_cohort(cohort, username_or_email_or_user):
    try:
        if hasattr(username_or_email_or_user, 'email'):
            user = username_or_email_or_user
        else:
            user = get_user_by_username_or_email(username_or_email_or_user)

        membership, previous_cohort = CohortMembership.assign(cohort, user)
        tracker.emit(
            "edx.cohort.user_add_requested",
            {
                "user_id": user.id,
                "cohort_id": cohort.id,
                "cohort_name": cohort.name,
                "previous_cohort_id": getattr(previous_cohort, 'id', None),
                "previous_cohort_name": getattr(previous_cohort, 'name', None),
            }
        )
        cache = RequestCache(COHORT_CACHE_NAMESPACE).data
        cache_key = _cohort_cache_key(user.id, membership.course_id)
        cache[cache_key] = membership.course_user_group
        COHORT_MEMBERSHIP_UPDATED.send(sender=None, user=user, course_key=membership.course_id)
        return user, getattr(previous_cohort, 'name', None), False
    except User.DoesNotExist as ex:
        try:
            validate_email(username_or_email_or_user)
            try:
                assignment = UnregisteredLearnerCohortAssignments.objects.get(
                    email=username_or_email_or_user, course_id=cohort.course_id
                )
                assignment.course_user_group = cohort
                assignment.save()
            except UnregisteredLearnerCohortAssignments.DoesNotExist:
                assignment = UnregisteredLearnerCohortAssignments.objects.create(
                    course_user_group=cohort, email=username_or_email_or_user, course_id=cohort.course_id
                )
            tracker.emit(
                "edx.cohort.email_address_preassigned",
                {
                    "user_email": assignment.email,
                    "cohort_id": cohort.id,
                    "cohort_name": cohort.name,
                }
            )
            return (None, None, True)
        except ValidationError as invalid:
            if "@" in username_or_email_or_user:
                raise invalid
            else:
                raise ex
python
17
0.557422
105
43.979592
49
Look up the given user, and if successful, add them to the specified cohort.

Arguments:
    cohort: CourseUserGroup
    username_or_email_or_user: user or string. Treated as email if has '@'

Returns:
    User object (or None if the email address is preassigned),
    string (or None) indicating previous cohort,
    and whether the user is a preassigned user or not

Raises:
    User.DoesNotExist if can't find user. However, if a valid email is
    provided for the user, it is stored in a database so that the user
    can be added to the cohort if they eventually enroll in the course.
    ValueError if user already present in this cohort.
    ValidationError if an invalid email address is entered.
    User.DoesNotExist if a user could not be found.
function
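An illustrative call sketch only; it presupposes a configured Django/edX environment, and `cohort` stands for an existing CourseUserGroup instance (both are assumptions, not part of the record).

user, previous_cohort_name, preassigned = add_user_to_cohort(cohort, "learner@example.com")
if preassigned:
    # A valid email with no matching account was stored for later enrollment.
    assert user is None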
fn embedding(&self, word: &str) -> Option<Py<PyArray1<f32>>> {
    let embeddings = self.embeddings.borrow();

    embeddings.embedding(word).map(|e| {
        let gil = pyo3::Python::acquire_gil();
        e.into_owned().into_pyarray(gil.python()).to_owned()
    })
}
rust
13
0.563574
64
40.714286
7
/// Get the embedding for the given word.
///
/// If the word is not known, its representation is approximated
/// using subword units.
function
def remove_chimeras_denovo_from_seqs(seqs_fp, working_dir, threads=1):
    logger = logging.getLogger(__name__)
    logger.info('remove_chimeras_denovo_from_seqs seqs file %s'
                'to working dir %s' % (seqs_fp, working_dir))

    output_fp = join(
        working_dir, "%s.no_chimeras" % basename(seqs_fp))

    params = ['vsearch', '--uchime_denovo', seqs_fp,
              '--nonchimeras', output_fp,
              '-dn', '0.000001', '-xn', '1000',
              '-minh', '10000000', '--mindiffs', '5',
              '--fasta_width', '0', '--threads', str(threads)]
    sout, serr, res = _system_call(params)
    if not res == 0:
        logger.error('problem with chimera removal for file %s' % seqs_fp)
        logger.debug('stdout : %s' % sout)
        logger.debug('stderr : %s' % serr)
    return output_fp
python
10
0.56068
74
47.529412
17
Remove chimeras de novo using UCHIME (VSEARCH implementation).

Parameters
----------
seqs_fp: string
    file path to FASTA input sequence file
output_fp: string
    file path to store chimera-free results
threads : int
    number of threads (0 for all cores)

Returns
-------
output_fp
    the chimera removed fasta file name
function
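An illustrative invocation of the wrapper above; it assumes the function and its _system_call helper are importable, that the vsearch binary is on PATH, and that the file paths (which are made up) exist.

out_fp = remove_chimeras_denovo_from_seqs(
    seqs_fp="seqs.fasta",        # input FASTA (illustrative path)
    working_dir="/tmp/chimera",  # existing output directory (illustrative path)
    threads=4)
# out_fp == "/tmp/chimera/seqs.fasta.no_chimeras"; the file itself is written by vsearch.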