hexsha
stringlengths 40
40
| size
int64 140
1.03M
| ext
stringclasses 94
values | lang
stringclasses 21
values | max_stars_repo_path
stringlengths 3
663
| max_stars_repo_name
stringlengths 4
120
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
sequencelengths 1
10
| max_stars_count
int64 1
368k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
663
| max_issues_repo_name
stringlengths 4
120
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
sequencelengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
663
| max_forks_repo_name
stringlengths 4
135
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
sequencelengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 140
1.03M
| avg_line_length
float64 2.32
23.1k
| max_line_length
int64 11
938k
| alphanum_fraction
float64 0.01
1
| score
float32 3
4.25
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0c865092d1b024900a20974bf4805c2c027168ad | 1,337 | asm | Assembly | src/primes.asm | rprouse/8088 | 7cba221d5dd515144afa0d7bdd875f914e0e8c9a | [
"MIT"
] | null | null | null | src/primes.asm | rprouse/8088 | 7cba221d5dd515144afa0d7bdd875f914e0e8c9a | [
"MIT"
] | null | null | null | src/primes.asm | rprouse/8088 | 7cba221d5dd515144afa0d7bdd875f914e0e8c9a | [
"MIT"
] | null | null | null | ; Calculate primes using the Sieve of Eratosthenes
cpu 8086
bits 16
org 0x0100

; Sieve of Eratosthenes over the bytes at [table .. table+table_size).
; A zero byte means "still considered prime", 1 means "marked composite".
table:      equ 0x8000
table_size: equ 1000

jmp start

%include 'library.inc'

start:
    mov bx,table
    mov cx,table_size
    mov al,0
    ; Initialize the memory in the table to zero
.zero_loop:
    mov [bx],al             ; Write AL to the address pointed to by BX
    inc bx
    loop .zero_loop         ; Decrease CX and jump if non-zero

    mov ax,2                ; Start at first prime, 2. AX is the prime we are testing
.check_prime:
    mov bx,table            ; Set BX to the table address
    add bx,ax               ; Add the last prime to BX
    cmp byte [bx],0         ; Is it a prime? If it is still 0, we haven't marked it as a multiple
    jne .next
    push ax                 ; This is a prime, display it
    call display_number
    mov al,','
    call chout
    pop ax
    mov bx,table
    add bx,ax
.mark_multiples:
    add bx,ax               ; Next multiple of AX
    cmp bx,table+table_size
    jge .next               ; Stop at the end of the table. BUGFIX: was `jg`,
                            ; which allowed a 1-byte store at table+table_size
                            ; (just past the zeroed region). Signed compare is
                            ; fine here: both operands sit in the same half of
                            ; the address space (>= 0x8000).
    mov byte [bx],1         ; Set the value as not-prime in the table
    jmp .mark_multiples     ; Back and multiply again
.next:
    inc ax                  ; Increment AX to the next number to check
    cmp ax,table_size       ; Make sure we are not at the end
    jne .check_prime
jmp exit | 25.711538 | 93 | 0.615557 | 3.28125 |
284b2688717d354ae6e7444f223b3fae0698eee2 | 1,513 | rb | Ruby | lib/cute_print/formatter.rb | wconrad/cute_print | 9df8f056579324d329030ef0bd6621b3f0fa2aa8 | [
"MIT"
] | 2 | 2015-06-19T17:31:24.000Z | 2017-09-27T19:44:50.000Z | lib/cute_print/formatter.rb | wconrad/cute_print | 9df8f056579324d329030ef0bd6621b3f0fa2aa8 | [
"MIT"
] | 5 | 2015-02-25T20:51:57.000Z | 2018-03-13T19:29:21.000Z | lib/cute_print/formatter.rb | wconrad/cute_print | 9df8f056579324d329030ef0bd6621b3f0fa2aa8 | [
"MIT"
] | null | null | null | require "pp"
require "stringio"
require_relative "format"
require_relative "labeler"
require_relative "location"
require_relative "location_label"
require_relative "source_label"
require_relative "values"
module CutePrint
  # @api private
  #
  # Formats zero or more values, each prefixed with an optional label
  # (source location and/or source text), and prints them to the
  # configured output. The value format (#inspect vs. pretty-print)
  # is selected before #write is called.
  class Formatter
    def initialize(opts = {})
      @method = opts.fetch(:method)
      @out = opts.fetch(:out)
      @block = opts.fetch(:block, nil)
      @args = opts.fetch(:values, [])
      @values = Values.new(@args, @block)
      @width = opts.fetch(:width)
      @location_label = nil
    end

    # Emit every labeled value; with no values but a non-empty label,
    # emit just the label (without its trailing ": ").
    def write
      if @values.empty? && !label.empty?
        write_line label.chomp(": ")
        return
      end
      @values.each do |value|
        write_lines Labeler.new(@format, @width, label, value).labeled
      end
    end

    # Capture the call site so it can be rendered as a location label.
    def with_location(format_key)
      @location_label = LocationLabel.make(format_key, Location.find)
    end

    # Select plain #inspect formatting.
    def inspect
      @format = Format::Inspect.new
    end

    # Select pretty-printed formatting.
    def pretty_print
      @format = Format::PrettyPrint.new
    end

    private

    def write_lines(lines)
      lines.each { |one_line| write_line one_line }
    end

    # Print the line, guaranteeing a trailing newline.
    def write_line(line)
      terminated = line =~ /\n\Z/ ? line : line + "\n"
      @out.print terminated
    end

    # Lazily-built label prefix shared by all values.
    def label
      @label ||= make_label
    end

    def make_label
      parts = []
      parts << @location_label.to_s if @location_label
      parts << SourceLabel.new(@block, @method) if @block
      parts.join
    end
  end
end
| 20.173333 | 64 | 0.611368 | 3.015625 |
0ce5d95f10a05417cb3b6fc154c24d7adc27cf45 | 1,877 | py | Python | scripts/baxter_find_tf.py | mkrizmancic/qlearn_baxter | 0498315212cacb40334cbb97a858c6ba317f52a3 | [
"MIT"
] | 4 | 2017-11-11T18:16:22.000Z | 2018-11-08T13:31:09.000Z | scripts/baxter_find_tf.py | mkrizmancic/qlearn_baxter | 0498315212cacb40334cbb97a858c6ba317f52a3 | [
"MIT"
] | null | null | null | scripts/baxter_find_tf.py | mkrizmancic/qlearn_baxter | 0498315212cacb40334cbb97a858c6ba317f52a3 | [
"MIT"
] | 2 | 2019-09-04T12:28:58.000Z | 2021-09-27T13:02:48.000Z | #!/usr/bin/env python
"""Calculate transformation matrices and broadcast transform from robot's base to head markers."""
import rospy
import tf
import math
from PyKDL import Vector, Frame, Rotation
if __name__ == '__main__':
    # Node setup: listen to OptiTrack marker frames plus the robot's own
    # kinematics, and republish the derived base transforms at 50 Hz.
    rospy.init_node('baxter_find_transformation')
    listener = tf.TransformListener()
    br = tf.TransformBroadcaster()
    rate = rospy.Rate(50)
    while not rospy.is_shutdown():
        try:
            # Motion-capture measurements of the head and arm marker bodies,
            # and the robot's base->gripper transform from its URDF/TF tree.
            (trans_OH, rot_OH) = listener.lookupTransform('/optitrack', '/bax_head', rospy.Time(0))
            (trans_OA, rot_OA) = listener.lookupTransform('/optitrack', '/bax_arm', rospy.Time(0))
            (trans_BG, rot_BG) = listener.lookupTransform('/base', '/left_gripper_base', rospy.Time(0))
        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            # TF data not available yet (or extrapolation needed); retry.
            continue
        # Rotations
        rot_OH = Rotation.Quaternion(*rot_OH)
        rot_OA = Rotation.Quaternion(*rot_OA)
        rot_BG = Rotation.Quaternion(*rot_BG)
        # Fixed marker-to-gripper mounting rotation.
        # NOTE(review): assumes the arm marker body is mounted with this RPY
        # offset relative to the gripper base -- confirm against hardware.
        rot_AG = Rotation.RPY(math.pi / 2, -math.pi, math.pi / 2)
        # Creating Frames
        T_OH = Frame(rot_OH, Vector(*trans_OH))
        T_OA = Frame(rot_OA, Vector(*trans_OA))
        T_BG = Frame(rot_BG, Vector(*trans_BG))
        T_AG = Frame(rot_AG, Vector(0, 0, 0))
        # Finding right transformation
        # head->base = (optitrack->head)^-1 * (optitrack->arm) * (arm->gripper) * (base->gripper)^-1
        T_HB = T_OH.Inverse() * T_OA * T_AG * T_BG.Inverse()
        T_empty_p = Vector(0, 0, 0)
        T_empty_Q = Rotation.Quaternion(0, 0, 0, 1)
        T_empty = Frame(T_empty_Q, T_empty_p)
        # Broadcast new transformations
        br.sendTransform(T_HB.p, T_HB.M.GetQuaternion(), rospy.Time.now(), 'base', 'bax_head')
        br.sendTransform(T_HB.p, T_HB.M.GetQuaternion(), rospy.Time.now(), 'reference/base', 'bax_head')
        br.sendTransform(T_empty.p, T_empty.M.GetQuaternion(), rospy.Time.now(), 'world', 'base')
        rate.sleep()
| 39.93617 | 104 | 0.64731 | 3.140625 |
ade1192e66419a4f1a0f70babfae972e654e2cc0 | 13,712 | lua | Lua | 3DreamEngine/loader/dae.lua | sewbacca/3DreamEngine | d688b7d04fd7ffdbedaa55b0d26785e78304bbca | [
"MIT"
] | 209 | 2019-04-01T20:58:05.000Z | 2022-03-30T20:02:26.000Z | 3DreamEngine/loader/dae.lua | sewbacca/3DreamEngine | d688b7d04fd7ffdbedaa55b0d26785e78304bbca | [
"MIT"
] | 54 | 2019-03-30T23:58:34.000Z | 2022-02-01T14:20:57.000Z | 3DreamEngine/loader/dae.lua | sewbacca/3DreamEngine | d688b7d04fd7ffdbedaa55b0d26785e78304bbca | [
"MIT"
] | 12 | 2019-03-31T09:50:25.000Z | 2022-03-03T09:52:04.000Z | --[[
#dae - COLLADA
--]]
--load space seperated arrays as floats or as strings
-- Splits a whitespace-separated string into an array of numbers.
-- Tokens that fail tonumber() are silently skipped, as before.
local function loadFloatArray(arr)
	local numbers = { }
	for token in arr:gmatch("%S+") do
		numbers[#numbers + 1] = tonumber(token)
	end
	return numbers
end
-- Splits a whitespace-separated string into an array of raw string tokens.
local function loadArray(arr)
	local tokens = { }
	for token in arr:gmatch("%S+") do
		tokens[#tokens + 1] = token
	end
	return tokens
end
--load entire tree and index all IDs
-- `indices` maps every id (with and without a leading "#", so COLLADA
-- "#id" url references resolve directly) to its XML node; `localToGlobal`
-- maps sid -> id. Both are reset by the loader before each parse.
local indices
local localToGlobal
-- Depth-first walk over the parsed XML tree, registering every node that
-- carries an _attr.id (and optionally _attr.sid) in the maps above.
local function indexTree(node)
	for key, child in pairs(node) do
		-- recurse into child element lists, but not into the attribute table
		if type(child) == "table" and key ~= "_attr" then
			indexTree(child)
		end
	end
	if node._attr and node._attr.id then
		indices[node._attr.id] = node
		indices["#" .. node._attr.id] = node
		if node._attr.sid then
			localToGlobal[node._attr.sid] = node._attr.id
		end
	end
end
-- Loader entry point: parses the COLLADA (.dae) XML document at `path` into
-- `obj` (materials, sub-objects/meshes, lights, skeleton and animations).
-- NOTE(review): indentation restored for readability; statements unchanged.
return function(self, obj, path)
	local xml2lua = require(self.root .. "/libs/xml2lua/xml2lua")
	local handler = require(self.root .. "/libs/xml2lua/tree"):new()
	--parse
	local file = love.filesystem.read(path)
	xml2lua.parser(handler):parse(file)
	-- COLLADA is Z-up; rotate -90 deg around X to match the engine's Y-up.
	local correction = mat4:getRotateX(-math.pi/2)
	local root = handler.root.COLLADA[1]
	--get id indices
	indices = { }
	localToGlobal = { }
	indexTree(root)
	--load armatures and vertex weights
	local armatures = { }
	local controllers = { }
	if root.library_controllers then
		for d,s in ipairs(root.library_controllers[1].controller) do
			if s.skin then
				local name = s.skin[1]._attr.source:sub(2)
				local a = {
					weights = { },
					joints = { },
					jointIDs = { },
				}
				armatures[name] = a
				controllers[s._attr.id] = name
				--load sources
				local weights = { }
				for i,v in ipairs(s.skin[1].source) do
					local typ = v.technique_common[1].accessor[1].param[1]._attr.name
					if typ == "JOINT" then
						a.jointIDs = loadArray(v.Name_array[1][1])
						for d,s in ipairs(a.jointIDs) do
							a.jointIDs[d] = localToGlobal[s] or s
						end
					elseif typ == "WEIGHT" then
						weights = loadFloatArray(v.float_array[1][1])
					end
				end
				--load weights
				local vw = s.skin[1].vertex_weights[1]
				local vcount = vw.vcount and loadFloatArray(vw.vcount[1][1]) or { }
				local ids = loadFloatArray(vw.v[1][1])
				local count = tonumber(vw._attr.count)
				local fields = #vw.input
				for _,input in ipairs(vw.input) do
					local typ = input._attr.semantic
					local offset = 1 + tonumber(input._attr.offset)
					if typ == "JOINT" then
						local ci = 1
						for i = 1, count do
							local verts = vcount[i] or 1
							a.joints[i] = { }
							for v = 1, verts do
								local id = ids[(ci-1)*fields+offset]
								a.joints[i][v] = id+1
								ci = ci + 1
							end
						end
					elseif typ == "WEIGHT" then
						local ci = 1
						for i = 1, count do
							local verts = vcount[i] or 1
							a.weights[i] = { }
							for v = 1, verts do
								local id = ids[(ci-1)*fields+offset]
								a.weights[i][v] = weights[id+1]
								ci = ci + 1
							end
						end
					end
				end
				--normalize weights and limit to 4 (GPU limit)
				for i = 1, #a.weights do
					-- drop the smallest weight until at most 4 remain
					while #a.weights[i] > 4 do
						local min, best = math.huge, 1
						for d,s in ipairs(a.weights[i]) do
							if s < min then
								min = s
								best = d
							end
						end
						table.remove(a.joints[i], best)
						table.remove(a.weights[i], best)
					end
					--normalize
					local sum = 0
					for d,s in ipairs(a.weights[i]) do
						sum = sum + s
					end
					if sum > 0 then
						for d,s in ipairs(a.weights[i]) do
							a.weights[i][d] = s / sum
						end
					end
				end
			end
		end
	end
	--load materials
	if root.library_materials then
		for _,mat in ipairs(root.library_materials[1].material) do
			local name = mat._attr.name
			local material = self:newMaterial(name)
			obj.materials[name] = material
			indices[mat._attr.id] = material
			--load
			if mat.instance_effect then
				local effect = indices[mat.instance_effect[1]._attr.url]
				--get first profile
				local profile
				for d,s in pairs(effect) do
					profile = s[1]
				end
				--parse data
				if profile then
					for step, dataArr in pairs(profile.technique[1]) do
						if step ~= "_attr" then
							local data = dataArr[1]
							if data.emission then
								local e = data.emission[1]
								if e.color then
									local color = loadFloatArray( e.color[1][1] )
									-- alpha channel scales the emission color
									material.emission = {color[1] * color[4], color[2] * color[4], color[3] * color[4]}
								end
							end
							if data.diffuse then
								local d = data.diffuse[1]
								if d.color then
									local color = loadFloatArray( d.color[1][1] )
									material.color = color
								end
							end
							if data.specular then
								local s = data.specular[1]
								if s.color then
									local color = loadFloatArray( s.color[1][1] )
									-- collapse the RGB specular color to a scalar intensity
									material.specular = math.sqrt(color[1]^2 + color[2]^2 + color[3]^2)
								end
							end
							if data.shininess then
								material.glossiness = tonumber( data.shininess[1].float[1][1] )
							end
							if data.index_of_refraction then
								material.ior = tonumber( data.index_of_refraction[1].float[1][1] )
							end
						end
					end
				end
			end
		end
	end
	--load main geometry
	local meshData = { }
	for d,geo in ipairs(root.library_geometries[1].geometry) do
		local mesh = geo.mesh[1]
		local id = geo._attr.id
		meshData[id] = meshData[id] or { }
		--translation table (COLLADA semantic -> sub-object buffer name)
		local translate = {
			["VERTEX"] = "vertices",
			["NORMAL"] = "normals",
			["TEXCOORD"] = "texCoords",
			["COLOR"] = "colors",
		}
		--parse vertices
		local o
		local lastMaterial
		local index = 0
		local edges = { }
		-- 1 = <triangles>, 2 = <polylist>, 3 = <polygons>
		for typ = 1, 3 do
			local list
			if typ == 1 then
				list = mesh.triangles
			elseif typ == 2 then
				list = mesh.polylist
			else
				list = mesh.polygons
			end
			if list then
				for _,l in ipairs(list) do
					local mat = indices[l._attr.material] or obj.materials.None
					local material = self.materialLibrary[mat.name] or mat
					if obj.args.splitMaterials then
						o = self:newSubObject(geo._attr.id, obj, material)
						meshData[id][#meshData[id]+1] = o
						index = 0
					elseif not o then
						o = self:newSubObject(geo._attr.id, obj, material)
						meshData[id][#meshData[id]+1] = o
					end
					--connect with armature
					if armatures[o.name] and not o.weights then
						o.weights = { }
						o.joints = { }
						o.jointIDs = armatures[o.name].jointIDs
					end
					--ids of source components per vertex
					local ids
					local vcount
					if typ == 3 then
						ids = { }
						vcount = { }
						--combine polygons
						for _,p in ipairs(l.p) do
							local a = loadFloatArray(p[1])
							for _,v in ipairs(a) do
								ids[#ids+1] = v
							end
							vcount[#vcount+1] = #a
						end
					else
						ids = loadFloatArray(l.p[1][1])
						vcount = l.vcount and loadFloatArray(l.vcount[1][1]) or { }
					end
					--get max offset
					local fields = 0
					for d,input in ipairs(l.input) do
						fields = tonumber(input._attr.offset) + 1
					end
					--parse data arrays
					local verticeIndex = { }
					for d,input in ipairs(l.input) do
						local f = translate[input._attr.semantic]
						if f then
							-- VERTEX inputs point at a <vertices> node; follow its
							-- nested input to the actual float source.
							local s = loadFloatArray( (indices[input._attr.source].input and indices[ indices[input._attr.source].input[1]._attr.source ] or indices[input._attr.source]).float_array[1][1] )
							for i = 1, #ids / fields do
								local id = ids[(i-1)*fields + tonumber(input._attr.offset) + 1]
								if f == "texCoords" then
									--xy vector (flip V for the engine's texture origin)
									o[f][index+i] = {
										s[id*2+1],
										1.0-s[id*2+2],
									}
								elseif f == "colors" then
									--rgba vector
									o[f][index+i] = {
										s[id*4+1],
										s[id*4+2],
										s[id*4+3],
										s[id*4+4],
									}
								else
									--xyz vectors
									o[f][index+i] = {
										s[id*3+1],
										s[id*3+2],
										s[id*3+3]
									}
									if f == "vertices" then
										verticeIndex[index+i] = id
									end
									--also connect weight and joints
									if f == "vertices" and o.weights then
										o.weights[index+i] = armatures[o.name].weights[id+1]
										o.joints[index+i] = armatures[o.name].joints[id+1]
										o.materials[index+i] = material
									end
								end
							end
						end
					end
					--parse polygons
					local count = l._attr.count
					local i = index+1
					for face = 1, count do
						local verts = vcount[face] or 3
						--store edges (keyed by packed vertex-id pair to dedupe)
						for v = 1, verts do
							local a, b = i + v - 1, v == verts and i or (i + v)
							local min = math.min(verticeIndex[a], verticeIndex[b])
							local max = math.max(verticeIndex[a], verticeIndex[b])
							local id = min * 65536 + max
							if not edges[id] then
								edges[id] = true
								o.edges[#o.edges+1] = {a, b}
							end
						end
						if verts == 3 then
							--tris
							o.faces[#o.faces+1] = {i, i+1, i+2}
						else
							--triangulates, fan style
							for f = 1, verts-2 do
								o.faces[#o.faces+1] = {i, i+f, i+f+1}
							end
						end
						i = i + verts
					end
					index = #o.vertices
				end
			end
		end
	end
	--load light
	local lightIDs = { }
	if root.library_lights then
		for d,light in ipairs(root.library_lights[1].light) do
			local l = self:newLight()
			lightIDs[light._attr.id] = l
			if light.extra and light.extra[1] and light.extra[1].technique and light.extra[1].technique[1] then
				local dat = light.extra[1].technique[1]
				l:setColor(dat.red and tonumber(dat.red[1][1]) or 1.0, dat.green and tonumber(dat.green[1][1]) or 1.0, dat.blue and tonumber(dat.blue[1][1]) or 1.0)
				l:setBrightness(dat.energy and tonumber(dat.energy[1][1]) or 1.0)
			end
			table.insert(obj.lights, l)
		end
	end
	-- Instantiates all sub-objects parsed for `mesh` under `name`, applying
	-- the node transform (with the Z-up -> Y-up correction).
	local function addObject(name, mesh, transform)
		for _,subObject in ipairs(meshData[mesh]) do
			local id = name
			if obj.args.splitMaterials then
				id = id .. "_" .. subObject.material.name
			end
			obj.objects[id] = subObject:clone()
			obj.objects[id].name = name
			obj.objects[id].transform = correction * transform
		end
	end
	--load scene
	for d,s in ipairs(root.library_visual_scenes[1].visual_scene[1].node) do
		obj.joints = { }
		if s.instance_geometry then
			--object
			local id = s.instance_geometry[1]._attr.url:sub(2)
			local name = s._attr.name or s._attr.id
			local transform = mat4(loadFloatArray(s.matrix[1][1]))
			addObject(name, id, transform)
		elseif s.instance_light then
			local transform = correction * mat4(loadFloatArray(s.matrix[1][1]))
			local l = lightIDs[s.instance_light[1]._attr.url:sub(2)]
			l:setPosition(transform[4], transform[8], transform[12])
		elseif s._attr.name == "Armature" then
			--propably an armature
			--TODO: not a proper way to identify armature nodes
			local function skeletonLoader(nodes, parentTransform)
				local skel = { }
				for d,s in ipairs(nodes) do
					if s.instance_controller then
						--object associated with skeleton
						local id = s.instance_controller[1]._attr.url:sub(2)
						local mesh = controllers[id]
						local name = s._attr.name or s._attr.id
						local transform = mat4(loadFloatArray(s.matrix[1][1]))
						addObject(name, mesh, transform)
					end
					if s._attr.type == "JOINT" then
						local name = s._attr.id
						local m = mat4(loadFloatArray(s.matrix[1][1]))
						local bindTransform = parentTransform and parentTransform * m or m
						skel[name] = {
							name = name,
							bindTransform = m,
							inverseBindTransform = bindTransform:invert(),
						}
						obj.joints[name] = skel[name]
						if s.node then
							skel[name].children = skeletonLoader(s.node, bindTransform)
						end
					end
				end
				return skel
			end
			obj.skeleton = skeletonLoader(s.node)
			break
		end
	end
	--load animations
	if root.library_animations then
		local animations = { }
		local function loadAnimation(anim)
			for _,a in ipairs(anim) do
				if a.animation then
					loadAnimation(a.animation)
				else
					local keyframes = { }
					-- strip the "/transform" suffix from the channel target
					-- NOTE(review): assumes targets always end in a 10-char
					-- suffix -- confirm against exporter output.
					local name = a.channel[1]._attr.target:sub(1, -11)
					--parse sources
					local sources = { }
					for d,s in ipairs(a.source) do
						sources[s._attr.id] = s.float_array and loadFloatArray(s.float_array[1][1]) or s.Name_array and loadArray(s.Name_array[1][1])
					end
					for d,s in ipairs(a.sampler[1].input) do
						sources[s._attr.semantic] = sources[s._attr.source:sub(2)]
					end
					--get matrices (each keyframe is a 4x4 transform)
					local frames = { }
					local positions = { }
					for i = 1, #sources.OUTPUT / 16 do
						local m = mat4(unpack(sources.OUTPUT, i*16-15, i*16))
						frames[#frames+1] = {
							time = sources.INPUT[i],
							--interpolation = sources.INTERPOLATION[i],
							rotation = quat.fromMatrix(m:subm()),
							position = vec3(m[4], m[8], m[12]),
						}
					end
					--pack
					animations[name] = frames
				end
			end
		end
		loadAnimation(root.library_animations[1].animation)
		--split animations
		if obj.args.animations then
			obj.animations = { }
			obj.animationLengths = { }
			for anim, time in pairs(obj.args.animations) do
				obj.animations[anim] = { }
				obj.animationLengths[anim] = time[2] - time[1]
				for joint, frames in pairs(animations) do
					local newFrames = { }
					for i, frame in ipairs(frames) do
						if frame.time >= time[1] and frame.time <= time[2] then
							table.insert(newFrames, frame)
						end
					end
					obj.animations[anim][joint] = newFrames
				end
			end
		else
			obj.animations = {
				default = animations,
			}
			-- NOTE(review): `animations` is keyed by joint name, so
			-- animations[#animations] is likely nil here -- verify.
			obj.animationLengths = {
				default = animations[#animations].time,
			}
		end
	end
end | 26.573643 | 184 | 0.589192 | 3.296875 |
f024f2d1468cd63a89d1e5336dc2508a4542b04f | 1,476 | py | Python | Stack/10-stack-special-design-and-implement.py | mahmutcankurt/DataStructures_Python | bfb81e3530b535c4e48c07548dc4a4f9a648bab2 | [
"MIT"
] | 1 | 2022-01-25T22:17:55.000Z | 2022-01-25T22:17:55.000Z | Stack/10-stack-special-design-and-implement.py | mahmutcankurt/DataStructures_Python | bfb81e3530b535c4e48c07548dc4a4f9a648bab2 | [
"MIT"
] | null | null | null | Stack/10-stack-special-design-and-implement.py | mahmutcankurt/DataStructures_Python | bfb81e3530b535c4e48c07548dc4a4f9a648bab2 | [
"MIT"
] | null | null | null | class Stack:
def __init__(self):
self.array = []
self.top = -1
self.max = 100
def isEmpty(self):
if(self.top == -1):
return True
else:
return False
def isFull(self):
if(self.top == self.max -1):
return True
else:
return False
def push(self, data):
if(self.isFull()):
print("Stack Overflow")
return
else:
self.top += 1
self.array.append(data)
def pop(self):
if(self.isEmpty()):
print("Stack Underflow")
return
else:
self.top -= 1
return(self.array.pop())
class SpecialStack(Stack):
def __init__(self):
super().__init__()
self.Min = Stack()
def push(self, x):
if(self.isEmpty):
super().push(x)
self.Min.push(x)
else:
super().push(x)
y = self.Min.pop()
self.Min.push(y)
if(x <= y):
self.Min.push(x)
else:
self.Min.push(y)
def pop(self):
x = super().pop()
self.Min.pop()
return x
def getMin(self):
x = self.Min.pop()
self.Min.push(x)
return x
if __name__ == "__main__":
s = SpecialStack()
s.push(10)
s.push(20)
s.push(30)
print(s.getMin())
s.push(5)
print(s.getMin()) | 20.219178 | 36 | 0.443767 | 3.5 |
2069a8783ef5257f23ae89a2c54877facee8a7e6 | 1,961 | lua | Lua | lua/twilight/colors/init.lua | jzone1366/twilight.nvim | da72643da7b73745ce5b56dd79340446949acf7f | [
"MIT"
] | 1 | 2022-03-14T23:15:29.000Z | 2022-03-14T23:15:29.000Z | lua/twilight/colors/init.lua | jzone1366/twilight.nvim | da72643da7b73745ce5b56dd79340446949acf7f | [
"MIT"
] | 1 | 2022-03-15T08:23:39.000Z | 2022-03-15T14:31:22.000Z | lua/twilight/colors/init.lua | jzone1366/twilight.nvim | da72643da7b73745ce5b56dd79340446949acf7f | [
"MIT"
] | null | null | null | local M = {}
-- Names of the palette modules shipped with the colorscheme
-- (loaded as twilight.colors.<style>).
M.styles = {
	"light",
	"dark",
}

-- Adds subtle and harsh colors depending if the colors are dark or light
-- @param colors table
-- @return table of colors
local function construct(colors)
	-- "harsh" is the maximum-contrast tone and "subtle" the minimum-contrast
	-- one, chosen relative to the palette's light/dark orientation.
	colors.harsh = colors.meta.light and colors.black or colors.white
	colors.subtle = colors.meta.light and colors.white or colors.black
	return colors
end
-- Returns the colorscheme's base palette (without configuration overrides)
-- for the given style name.
-- Falls back to the configured style when name is nil, picks a style at
-- random for "random", and warns + defaults to light on unknown names.
-- @param name string (optional)
-- @return table of colors
function M.init(name)
	name = name or require("twilight.config").options.style
	if name == "random" then
		local pick = M.styles[math.random(#M.styles)]
		return construct(require("twilight.colors." .. pick).init())
	end
	for _, known in ipairs(M.styles) do
		if known == name then
			return construct(require("twilight.colors." .. name).init())
		end
	end
	require("twilight.util").warn("colorscheme " .. name .. " was not found")
	return construct(require("twilight.colors.light").init())
end
-- Returns the colorscheme's palette (with overrides applied via load) for
-- the given style name.
-- Falls back to the configured style when name is nil, picks a style at
-- random for "random", and warns + defaults to light on unknown names.
-- @param name string (optional)
-- @return table of colors
function M.load(name)
	name = name or require("twilight.config").options.style
	if name == "random" then
		local pick = M.styles[math.random(#M.styles)]
		return construct(require("twilight.colors." .. pick).load())
	end
	for _, known in ipairs(M.styles) do
		if known == name then
			return construct(require("twilight.colors." .. name).load())
		end
	end
	require("twilight.util").warn("colorscheme " .. name .. " was not found")
	return construct(require("twilight.colors.light").load())
end
return M
| 29.712121 | 87 | 0.720551 | 3.421875 |
43c9ae59a393ebfbeb768d3575fb36a4c0db3588 | 1,578 | go | Go | storage/s3_func.go | OgreCase/kit | bebf00292e30262a2fc33b0c544e3e3de27194de | [
"Apache-2.0"
] | null | null | null | storage/s3_func.go | OgreCase/kit | bebf00292e30262a2fc33b0c544e3e3de27194de | [
"Apache-2.0"
] | null | null | null | storage/s3_func.go | OgreCase/kit | bebf00292e30262a2fc33b0c544e3e3de27194de | [
"Apache-2.0"
] | 1 | 2022-01-10T09:13:38.000Z | 2022-01-10T09:13:38.000Z | package storage
import (
	"bytes"
	"io"
	"mime/multipart"
	"net/http"
	"path/filepath"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
	"github.com/globalsign/mgo/bson"
)
// UploadFileToS3 reads the uploaded multipart file and stores it in the S3
// bucket under a unique "pictures/<objectid><ext>" key, which is returned
// (the key is also part of the object's public URL).
//
// Fixes vs. the original version:
//   - the opened file handle is now closed (it was leaked),
//   - read errors / short reads are no longer silently ignored (io.ReadFull),
//   - a nil error is returned explicitly on success.
func UploadFileToS3(s *session.Session, fileHeader *multipart.FileHeader) (string, error) {
	file, err := fileHeader.Open()
	if err != nil {
		return "", err
	}
	defer file.Close()

	// Read the whole upload into memory: PutObject needs the body plus the
	// exact content length, and DetectContentType inspects the bytes.
	size := fileHeader.Size
	buffer := make([]byte, size)
	if _, err := io.ReadFull(file, buffer); err != nil {
		return "", err
	}

	// Unique object key, preserving the original file extension.
	tempFileName := "pictures/" + bson.NewObjectId().Hex() + filepath.Ext(fileHeader.Filename)

	// NOTE(review): the bucket name is hard-coded; replace "test-bucket"
	// with your own bucket (consider making it configurable).
	_, err = s3.New(s).PutObject(&s3.PutObjectInput{
		Bucket:               aws.String("test-bucket"),
		Key:                  aws.String(tempFileName),
		ACL:                  aws.String("public-read"), // could be private if you want it to be access by only authorized users
		Body:                 bytes.NewReader(buffer),
		ContentLength:        aws.Int64(int64(size)),
		ContentType:          aws.String(http.DetectContentType(buffer)),
		ContentDisposition:   aws.String("attachment"),
		ServerSideEncryption: aws.String("AES256"),
		StorageClass:         aws.String("INTELLIGENT_TIERING"),
	})
	if err != nil {
		return "", err
	}
	return tempFileName, nil
}
| 32.204082 | 123 | 0.680608 | 3.140625 |
0cdcd31b1d541c0b2fc7fa87f9fe6a1fb877291b | 4,997 | py | Python | rdsslib/kinesis/client.py | JiscSD/rdss-shared-libraries | cf07cad3f176ef8be1410fc29b240fb4791e607a | [
"Apache-2.0"
] | null | null | null | rdsslib/kinesis/client.py | JiscSD/rdss-shared-libraries | cf07cad3f176ef8be1410fc29b240fb4791e607a | [
"Apache-2.0"
] | 4 | 2018-02-15T12:32:26.000Z | 2018-03-06T16:33:34.000Z | rdsslib/kinesis/client.py | JiscSD/rdss-shared-libraries | cf07cad3f176ef8be1410fc29b240fb4791e607a | [
"Apache-2.0"
] | 1 | 2018-03-13T19:38:54.000Z | 2018-03-13T19:38:54.000Z | import json
import logging
from .errors import MaxRetriesExceededException, DecoratorApplyException
MAX_ATTEMPTS = 6


class KinesisClient(object):
    """Facade that delegates Kinesis stream I/O to a writer and a reader."""

    def __init__(self, writer, reader):
        """
        Writes and reads messages to and from Kinesis streams
        :param writer: handles writing of payloads to Kinesis stream
        :param reader: handles reading of payloads from Kinesis stream
        :type writer: writer.StreamWriter
        :type reader: reader.StreamReader
        """
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging.INFO)
        self.writer = writer
        self.reader = reader

    def write_message(self, stream_names, payload, max_attempts=MAX_ATTEMPTS):
        """Write a payload into each stream in stream_names.

        :param stream_names: Kinesis streams to write to
        :param payload: JSON payload
        :param max_attempts: maximum number of times to attempt writing
        :type stream_names: list of str
        :type payload: str
        """
        for name in stream_names:
            self.writer.put_stream(name, payload, max_attempts)

    def read_messages(self, stream_name, seq_number=None):
        """Return a generator yielding messages read from stream_name.

        :param stream_name: Name of Kinesis stream to read from
        :param seq_number: Optional sequence number to start from
        :type stream_name: str
        :rtype: generator
        """
        return self.reader.read_stream(stream_name, seq_number=seq_number)
class EnhancedKinesisClient(KinesisClient):
    def __init__(self, writer, reader, error_handler, decorators=None):
        """
        Writes and reads messages to and from Kinesis streams with
        error handling and message decoration
        :param writer: Writes messages to Kinesis stream
        :param reader: Reads messages from Kinesis stream
        :param error_handler: Handles messages with errors
        :param decorators: Enhance messages with extra fields
        :type writer: writer.StreamWriter
        :type reader: reader.StreamReader
        :type error_handler: handlers.MessageErrorHandler
        :type decorators: list
        """
        super().__init__(writer, reader)
        # Normalize None to an empty decorator chain.
        if decorators:
            self.decorators = decorators
        else:
            self.decorators = []
        self.error_handler = error_handler

    def _apply_decorators(self, payload):
        """
        Applies a sequence of decorators that
        enhance and modify the contents of a payload
        :param payload: Undecorated JSON payload
        :type payload: str
        :return payload: Decorated JSON payload
        :rtype payload: str
        """
        decorated_payload = payload
        for decorator in self.decorators:
            try:
                # NOTE(review): each decorator receives the ORIGINAL payload,
                # not the previous decorator's output -- confirm whether
                # chaining (decorator.process(decorated_payload)) was intended.
                decorated_payload = decorator.process(payload)
            except Exception:
                self.logger.warning(
                    'Failed to apply decorator {}'.format(decorator.name))
                raise DecoratorApplyException()
        return decorated_payload

    def write_message(self, stream_names, payload, max_attempts=MAX_ATTEMPTS):
        """Write a payload into each stream in stream_names
        :param stream_names: Kinesis streams to write to
        :param payload: JSON payload
        :param max_attempts: Max number of times to attempt writing
        :type stream_names: list of str
        :type payload: str
        :type max_attempts: int
        """
        # Reject malformed JSON up front; such payloads go to the error
        # handler instead of the stream.
        try:
            json.loads(payload)
        except json.decoder.JSONDecodeError:
            self.error_handler.handle_invalid_json(payload)
            return

        decorated_payload = self._apply_decorators(payload)
        for stream_name in stream_names:
            try:
                # Write one stream at a time so a failure on one stream does
                # not prevent writes to the remaining streams.
                super().write_message([stream_name],
                                      decorated_payload,
                                      max_attempts)
            except MaxRetriesExceededException as e:
                # The writer reports the failing stream as the first arg.
                stream_name = e.args[0]
                error_code = 'GENERR005'
                error_description = 'Maximum retry attempts {0} exceed'\
                                    'for stream {1}'.format(max_attempts,
                                                            stream_name)
                self.error_handler.handle_error(decorated_payload,
                                                error_code,
                                                error_description)

    def handle_error(self, payload, error_code, error_description):
        """ Allows errors to be posted to the stream occurring from
        activities like payload validation
        :param payload: JSON payload
        :param error_code: Error Code
        :param error_description: Description Of Error
        """
        self.error_handler.handle_error(payload, error_code, error_description)
| 39.346457 | 79 | 0.626976 | 3.34375 |
70b4a560218bd2b4ae7350c0aabd5d5072a724e9 | 2,089 | go | Go | pkg/util/file/volatile_file/volatile_file.go | dizzy57/flow | cc1282eb8a54943686115a95468101835cdce481 | [
"MIT"
] | null | null | null | pkg/util/file/volatile_file/volatile_file.go | dizzy57/flow | cc1282eb8a54943686115a95468101835cdce481 | [
"MIT"
] | null | null | null | pkg/util/file/volatile_file/volatile_file.go | dizzy57/flow | cc1282eb8a54943686115a95468101835cdce481 | [
"MIT"
] | null | null | null | package file
import (
"fmt"
"io/ioutil"
"sync"
log "github.com/sirupsen/logrus"
event "github.com/awesome-flow/flow/pkg/util/file/event"
"github.com/fsnotify/fsnotify"
)
const (
	// VFPermDefault is the file mode used when writing the backing file.
	VFPermDefault = 0644
)

// VolatileFile watches a file on disk via fsnotify and republishes
// create/update/delete changes as events on its notify channel.
type VolatileFile struct {
	path    string             // absolute or relative path of the watched file
	once    *sync.Once         // guards one-time watcher deployment
	watcher *fsnotify.Watcher  // underlying filesystem watcher
	notify  chan *event.Event  // downstream change notifications
}
// New creates a VolatileFile for path with an initialized (but not yet
// started) fsnotify watcher. Call Deploy to begin receiving events on the
// notify channel.
func New(path string) (*VolatileFile, error) {
	w, err := fsnotify.NewWatcher()
	if err != nil {
		return nil, err
	}
	vf := &VolatileFile{
		path:    path,
		once:    &sync.Once{},
		watcher: w,
		notify:  make(chan *event.Event),
	}
	return vf, nil
}
// Deploy starts the event-forwarding goroutine exactly once (sync.Once)
// and registers the path with the watcher. Subsequent calls are no-ops.
//
// Fix vs. the original: the error from watcher.Add was silently discarded
// and Deploy always returned nil; it is now propagated to the caller.
func (vf *VolatileFile) Deploy() error {
	log.Infof("Deploying a watcher for path: %s", vf.path)
	var addErr error
	vf.once.Do(func() {
		go func() {
			for ntf := range vf.watcher.Events {
				log.Infof("Received a new fsnotify notification: %s", ntf)
				// NOTE(review): fsnotify's Op is a bitmask; combined ops
				// (e.g. Create|Write) will not match these equality cases
				// and fall through to default -- confirm whether masked
				// checks (ntf.Op&fsnotify.Write != 0) were intended.
				switch ntf.Op {
				case fsnotify.Create:
					vf.notify <- event.New(event.Create)
				case fsnotify.Write:
					vf.notify <- event.New(event.Update)
				case fsnotify.Remove:
					vf.notify <- event.New(event.Delete)
				default:
					log.Infof("Ignored event: %s", ntf.String())
				}
			}
		}()
		addErr = vf.watcher.Add(vf.path)
	})
	return addErr
}
// TearDown stops watching the path. It does not close the notify channel
// or the underlying watcher.
func (vf *VolatileFile) TearDown() error {
	log.Infof("Removing the watcher for path: %s", vf.path)
	return vf.watcher.Remove(vf.path)
}
// ReadRawData returns the current contents of the backing file.
func (vf *VolatileFile) ReadRawData() ([]byte, error) {
	contents, err := ioutil.ReadFile(vf.path)
	if err != nil {
		return nil, err
	}
	return contents, nil
}

// ReadData exposes the raw file bytes through the generic data interface.
func (vf *VolatileFile) ReadData() (interface{}, error) {
	return vf.ReadRawData()
}
// WriteData encodes data to bytes and persists them to the backing file
// with the default permissions.
func (vf *VolatileFile) WriteData(data interface{}) error {
	encoded, err := vf.EncodeData(data)
	if err != nil {
		return err
	}
	return ioutil.WriteFile(vf.path, encoded, VFPermDefault)
}
// GetPath returns the watched file path.
func (vf *VolatileFile) GetPath() string {
	return vf.path
}

// GetNotifyChan returns the channel on which change events are delivered.
func (vf *VolatileFile) GetNotifyChan() chan *event.Event {
	return vf.notify
}
// EncodeData converts data to a byte slice. Only []byte payloads are
// supported; any other type yields an error.
func (vf *VolatileFile) EncodeData(data interface{}) ([]byte, error) {
	if byteData, ok := data.([]byte); ok {
		return byteData, nil
	}
	// Error strings are lower-case per Go convention (staticcheck ST1005).
	return nil, fmt.Errorf("failed to convert data to []byte")
}
| 20.281553 | 70 | 0.662518 | 3.03125 |
14992220885c7a8d417972337b8a383c2ae2eb5f | 2,756 | lua | Lua | Aetheri/aetheri/species/appearance.lua | cuteBoiButt/sb.StardustSuite | 3c442c94192df257f46e08afc9f3ff8b5a6f2016 | [
"MIT"
] | 30 | 2016-09-17T21:28:00.000Z | 2022-03-31T04:59:51.000Z | Aetheri/aetheri/species/appearance.lua | cuteBoiButt/sb.StardustSuite | 3c442c94192df257f46e08afc9f3ff8b5a6f2016 | [
"MIT"
] | 22 | 2016-10-16T01:37:24.000Z | 2021-11-29T20:47:52.000Z | Aetheri/aetheri/species/appearance.lua | cuteBoiButt/sb.StardustSuite | 3c442c94192df257f46e08afc9f3ff8b5a6f2016 | [
"MIT"
] | 14 | 2016-12-17T18:59:03.000Z | 2022-03-03T00:58:22.000Z | -- handles all appearance and animation apart from the HUD
require "/lib/stardust/color.lua"
-- Public module table; baseDirectives caches the most recently generated
-- image-directive string so it can be re-applied elsewhere.
appearance = {
  baseDirectives = "",
}
-- Source colors (hex RGBA) in the base body sprites that get remapped onto
-- the player's generated palette by color.replaceDirective.
local bodyReplacePalette = {
  "dafafafa", "caeaeafa", "badadafa", "aacacafa"
}
-- Builds a four-entry hex palette fading from the bright core luminance to
-- the dark border luminance at a fixed hue and saturation.
-- tbl: { hue, saturation, brightLum, darkLum }
local function generatePalette(tbl)
  local hue, sat = tbl[1], tbl[2]
  local brightLum, darkLum = tbl[3], tbl[4]
  local function shade(lum)
    return color.toHex(color.fromHsl{ hue, sat, lum })
  end
  return {
    shade(brightLum),
    shade(util.lerp(1/3, brightLum, darkLum)),
    shade(util.lerp(2/3, brightLum, darkLum)),
    shade(darkLum),
  }
end
local directives = "" -- NOTE(review): appears unused in this file; verify before removing
-- set when the glow color must be pushed to the player on the next update tick
local updateGlow
-- Regenerates the player's palette, glow color and sprite directives from the
-- stored (or freshly rolled) core HSL values, persists them as a status
-- property, and notifies dependent systems. Rolls are seeded from the entity
-- name, so the same character always gets the same starting color.
function appearance.updateColors()
  appearance.settings = status.statusProperty("aetheri:appearance", { })
  local a = appearance.settings
  if not a.coreHsl then
    local name = world.entityName(entity.id())
    a.coreHsl = { -- start with a randomized core color based on your name!
      sb.staticRandomDoubleRange(0.0, 1.0, name, "core hue"), -- random hue
      1.0 - sb.staticRandomDoubleRange(0.0, 1.0, name, "core saturation")^2, -- biased toward saturated
      math.min(1, sb.staticRandomI32Range(0, 4, name, "bright or dark?")), -- 1 in 5 chance to start dark
      sb.staticRandomDoubleRange(0.3, 0.7, name, "border brightness")
    }
    --playerext.message("generated values: " .. util.tableToString(a.coreHsl))
  end
  a.palette = generatePalette(a.coreHsl)
  a.glowColor = color.fromHsl {
    a.coreHsl[1],
    a.coreHsl[2],
    0.5 + (((a.coreHsl[3] + a.coreHsl[4]) / 2) - 0.5) * 0.5 -- average luma, pushed towards 0.5 (full vivid)
  }
  status.setStatusProperty("aetheri:appearance", a)
  -- First directive maps the vanilla body colors to transparent, then the
  -- replacement palette is applied on top.
  local d = {
    "?replace;663b14fe=00000000;8d581cfe=00000000;c88b28fe=00000000;e7c474fe=00000000;404040fe=00000000;808080fe=00000000;6d0103fe=00000000;02da37fe=00000000;5786fffe=00000000",
    color.replaceDirective(bodyReplacePalette, a.palette, true),
  }
  appearance.baseDirectives = table.concat(d)
  tech.setParentDirectives(appearance.baseDirectives)
  playerext.setGlowColor(color.lightColor(a.glowColor, 0.8))
  world.sendEntityMessage(entity.id(), "aetheri:paletteChanged")
  world.sendEntityMessage(entity.id(), "startech:refreshEnergyColor")
  updateGlow = true -- ask update() to re-apply the glow on the next tick
end
-- Per-tick hook: re-applies the glow color once after updateColors() has
-- flagged a change. The parameter is unused here.
function appearance.update(p)
  if not updateGlow then return end
  updateGlow = false
  playerext.setGlowColor(color.lightColor(appearance.settings.glowColor, 0.8))
end
-- register these here since this is executed during techstub init
message.setHandler("aetheri:refreshAppearance", appearance.updateColors)
-- Returns palette entries 1, 3 and 4 (skipping one mid shade) for use as an
-- energy color gradient.
message.setHandler("startech:getEnergyColor", function()
  local p = appearance.settings.palette
  return { p[1], p[3], p[4] } -- somewhat cut down palette
end)
| 35.333333 | 177 | 0.714078 | 3.125 |
74d046024ccef2a8077c21d99f32704efcf988c9 | 6,818 | js | JavaScript | myqq-webapp/src/views/Login/index.js | WHUT-XGP/chat-webapp | 53eba4e223411ca5b988857c56a38cc962d3c27e | [
"MIT"
] | 23 | 2020-12-25T08:39:11.000Z | 2022-03-23T07:12:23.000Z | myqq-webapp/src/views/Login/index.js | WHUT-XGP/chat-webapp | 53eba4e223411ca5b988857c56a38cc962d3c27e | [
"MIT"
] | 1 | 2022-01-02T14:31:28.000Z | 2022-01-02T14:31:28.000Z | myqq-webapp/src/views/Login/index.js | WHUT-XGP/chat-webapp | 53eba4e223411ca5b988857c56a38cc962d3c27e | [
"MIT"
] | null | null | null | import React, { useState, useEffect, useCallback } from 'react'
import { connect } from 'react-redux'
// 导入store相关
import { actionCreator } from './store'
// 导入CSS
import { LoginStyle } from './style'
// 导入组件
import Icon from '../../components/context/Icon'
import LoginInput from '../../components/common/LoginInput'
import Dialog from '../../components/common/Dialog'
import Loading from '../../components/common/loading'
import Toast from '../../components/common/Toast'
import { getInfo } from '../../api/LoginRequest'
/**
 * Login / registration screen. Toggles between a two-field login form and a
 * three-field registration form, drives the redux login/register actions,
 * and redirects to /home/message once a token is validated.
 */
function Login(props) {
    // Username/password state plus the redux-provided flags and actions.
    const { loading, error, history, register, token } = props
    const { getLogin, changeToken, changeLoading, changeIsError, registerUser, changeRegister } = props;
    const [password, setPassword] = useState('');
    const [username, setUsername] = useState('');
    const [isLoginStatus, setIsLoginStatus] = useState(true);
    const [confirmPassword, setConfirmPassword] = useState('');
    const [toast, setToast] = useState(false);
    const [content, setContent] = useState('');
    // Shows a transient toast message.
    // Memoized with useCallback so effects can depend on it safely.
    const changeToast = useCallback((content) => {
        setContent(content)
        setToast(true)
        // Hide it again after two seconds.
        setTimeout(() => {
            setToast(false)
        }, 2000);
    }, [setToast, setContent])
    // Restore a previously saved token from local storage.
    useEffect(() => {
        const localToken = localStorage.getItem('token');
        if (localToken) {
            changeToken(localToken);
        }
    }, [changeToken])
    // Successful login: validate the token, persist it, then redirect.
    useEffect(() => {
        if (token) {
            // Persist locally once the token checks out.
            getInfo('', token).then(() => {
                localStorage.setItem('token', token)
                history.push('/home/message')
            }).catch((err) => {
                console.log(err)
            })
        }
    }, [token, history])
    // Error handling: show a toast and clear the error flag.
    useEffect(() => {
        if (error) {
            changeToast(isLoginStatus ? '密码或用户名错误' : '用户名已存在')
            // Reset the flag so the toast fires again on the next failure.
            changeIsError(false)
        }
    }, [error, changeIsError, isLoginStatus,changeToast])
    // Successful registration: toast, then flip back to the login form.
    useEffect(() => {
        if (register) {
            changeToast('恭喜你! 注册成功!')
            changeRegister(false);
            setTimeout(() => {
                setIsLoginStatus(true);
            }, 500);
        }
    }, [register, changeRegister,changeToast])
    return (
        <LoginStyle>
            {/** Logo */}
            <div className="icon-box">
                <a href="/"><Icon xlinkHref='#icon-crew_react-copy'></Icon></a>
                <span>MyQQ</span>
            </div>
            {/** Login input fields */}
            {
                isLoginStatus && (<div className="input-box">
                    <LoginInput xlinkHref='#icon-morentouxiang' type="text" value={username} handleInput={(e) => {
                        setUsername(e)
                    }} placeHolder="请输入用户名" />
                    <LoginInput xlinkHref='#icon-mima' type="password" value={password} placeHolder="请输入密码" handleInput={(e) => {
                        setPassword(e)
                    }} />
                </div>)
            }
            {/** Registration input fields */}
            {
                !isLoginStatus && (<div className="input-box">
                    <LoginInput xlinkHref='#icon-morentouxiang' type="text" value={username} handleInput={(e) => {
                        setUsername(e)
                    }} placeHolder="请输入用户名" />
                    <LoginInput xlinkHref='#icon-mima' type="password" value={password} placeHolder="请输入密码" handleInput={(e) => {
                        setPassword(e)
                    }} />
                    <LoginInput xlinkHref={confirmPassword === "" ? "#icon-crew_react" : confirmPassword === password ? '#icon-querenmima' : '#icon-cuowu'} type="password" value={confirmPassword} placeHolder="确认密码" handleInput={(e) => {
                        setConfirmPassword(e)
                    }} />
                </div>)
            }
            {/** Submit button (spins while loading) */}
            <div className='button-go' style={{ animation: loading ? "circle 1s linear infinite" : "" }} onClick={() => {
                if (isLoginStatus) {
                    // Login: fetch credentials check through redux.
                    if (username && password) {
                        getLogin(username, password)
                        changeLoading(true)
                    } else {
                        changeToast('信息不足,请完成填写')
                    }
                } else {
                    // Registration: requires matching password confirmation.
                    if (username && password && password === confirmPassword) {
                        registerUser(username, password)
                        changeLoading(true);
                    } else {
                        changeToast('请完成填写')
                    }
                }
            }} >
                <Icon xlinkHref='#icon-denglu' size="1.3rem" />
            </div>
            {/** Toggle between login and registration */}
            <span style={{ marginTop: '1rem', fontSize: "0.8rem", textDecoration: 'underline', color: '#3F91CF' }} onClick={() => {
                setIsLoginStatus(!isLoginStatus)
            }}
            >{isLoginStatus ? '点我注册' : '切换登录'}</span>
            {/** Loading dialog */}
            <Dialog open={props.loading} title='加载中...' >
                <Loading />
            </Dialog>
            {/** Toast notification */}
            <Toast open={toast} content={content}></Toast>
        </LoginStyle>
    )
}
// Redux state-to-props mapping: projects the pieces of LoginReducer state
// that the Login screen consumes.
const mapStateToProps = (state) => {
    const login = state.LoginReducer;
    return {
        token: login.token,
        userInfo: login.userInfo,
        loading: login.loading,
        isLogin: login.isLogin,
        error: login.isError,
        register: login.isRegister
    };
}
// Wraps each Login-related action creator in dispatch. Handlers use braced
// bodies so they keep returning undefined, as before.
const mapDispatchToProps = (dispatch) => ({
    getLogin: (username, password) => { dispatch(actionCreator.getLogin(username, password)); },
    getInfo: (username) => { dispatch(actionCreator.getUserInfo(username)); },
    changeToken: (token) => { dispatch(actionCreator.tokenChange(token)); },
    changeLoading: (status) => { dispatch(actionCreator.changeLoadingStatus(status)); },
    changeIsLogin: (status) => { dispatch(actionCreator.changeIsLoginStatus(status)); },
    changeIsError: (status) => { dispatch(actionCreator.changeErrorStatus(status)); },
    registerUser: (username, password) => { dispatch(actionCreator.getRegister(username, password)); },
    changeRegister: (status) => { dispatch(actionCreator.changeRegisterStatus(status)); }
})
export default connect(mapStateToProps, mapDispatchToProps)(React.memo(Login)) | 35.510417 | 236 | 0.512174 | 3.078125 |
0be4094ec9c88b491ea00f03e9587e97033d9ed4 | 5,404 | js | JavaScript | src/app/main/registros/Registros.js | lucianoarmoa98/indufar_prospeccion_medica-campos-recetas-main | bb075c325597be524f58a74f5a8ae6a9ba59291c | [
"MIT"
] | null | null | null | src/app/main/registros/Registros.js | lucianoarmoa98/indufar_prospeccion_medica-campos-recetas-main | bb075c325597be524f58a74f5a8ae6a9ba59291c | [
"MIT"
] | null | null | null | src/app/main/registros/Registros.js | lucianoarmoa98/indufar_prospeccion_medica-campos-recetas-main | bb075c325597be524f58a74f5a8ae6a9ba59291c | [
"MIT"
] | null | null | null | // React y Redux.
import React from 'react';
import {connect} from 'react-redux';
import {bindActionCreators} from 'redux';
import * as registrosActions from './store/actions';
// Material UI.
import MaterialTable, {MTableToolbar} from 'material-table-hotfix-initial-page-remote-data';
// Otros.
import {FusePageSimple} from '@fuse';
import {configuracionDeTabla} from './RegistrosConfig';
import {
construirParametrosDePaginacion,
END_POINT_REGISTROS,
languageConfig
} from '../UIUtils';
import {withRouter} from 'react-router-dom';
import {Paper} from '@material-ui/core';
import Button from '@material-ui/core/Button';
import BackupIcon from '@material-ui/icons/Backup';
import Footer from '../../../components/Form/Footer';
/**
 * Paginated remote-data table of "registros". Fetches pages from
 * END_POINT_REGISTROS on demand via material-table's data callback and keeps
 * the current page / page size / search text in the redux store.
 */
class Registros extends React.Component {
  // Tracks mount state so the async fetch callback never resolves state for
  // an unmounted component.
  _isMounted = false;

  constructor(props) {
    super(props);
    this.state = {
      selectedFile: null
    };
    this.tableRef = React.createRef();
    this.onChangeHandler = this.onChangeHandler.bind(this);
  }

  componentDidMount() {
    this._isMounted = true;
  }

  componentWillUnmount() {
    this._isMounted = false;
  }

  // Forwards the selected file (Excel import) to the redux submit action.
  onChangeHandler(event) {
    this.props.formEditSubmit(event.target.files[0]);
  }

  render() {
    const {
      registros: {list, formEdit},
      listChangePage,
      listChangeQuantityPerPage,
      setSearchValue
    } = this.props;
    const {isSubmiting, success, error} = formEdit;
    return (
      <FusePageSimple
        content={
          <div className="p-24">
            <MaterialTable
              title={configuracionDeTabla.titulo}
              columns={configuracionDeTabla.columnas}
              tableRef={this.tableRef}
              data={query => (
                new Promise(resolve => {
                  // Build the paginated/filtered URL from the table query.
                  let url = construirParametrosDePaginacion(query, END_POINT_REGISTROS);
                  fetch(url)
                    .then(response => response.json())
                    .then(result => {
                      if (!this._isMounted) {
                        return;
                      }
                      if (setSearchValue) {
                        setSearchValue(query.search);
                      }
                      resolve({
                        data: result.data,
                        page: result.paginaActual,
                        totalCount: result.totalRegistros
                      });
                    });
                })
              )}
              components={{
                Container: props => <Paper {...props} elevation={0}/>,
                Toolbar: props => (
                  <div>
                    <MTableToolbar {...props} />
                    {/*<div style={{*/}
                    {/*  display: 'flex',*/}
                    {/*  flexDirection: 'row-reverse',*/}
                    {/*  height: 56*/}
                    {/*}}>*/}
                    {/*  <input*/}
                    {/*    id="contained-button-file"*/}
                    {/*    type="file"*/}
                    {/*    multiple*/}
                    {/*    name="file"*/}
                    {/*    onChange={this.onChangeHandler}*/}
                    {/*    style={{display: 'none'}}/>*/}
                    {/*  <label htmlFor="contained-button-file">*/}
                    {/*    <Button*/}
                    {/*      component="span"*/}
                    {/*      size='small'*/}
                    {/*      variant="contained"*/}
                    {/*      disableElevation*/}
                    {/*      style={{*/}
                    {/*        alignSelf: 'center',*/}
                    {/*        marginRight: 16*/}
                    {/*      }}*/}
                    {/*      color='secondary'*/}
                    {/*      startIcon={<BackupIcon />}>*/}
                    {/*      Importar Excel*/}
                    {/*    </Button>*/}
                    {/*  </label>*/}
                    {/*  <div style={{width: 400, marginLeft: 16, marginRight: 16}}>*/}
                    {/*    <Footer*/}
                    {/*      submitting={isSubmiting}*/}
                    {/*      error={error}*/}
                    {/*      success={success}/>*/}
                    {/*  </div>*/}
                    {/*</div>*/}
                  </div>
                )
              }}
              onChangePage={listChangePage}
              onChangeRowsPerPage={listChangeQuantityPerPage}
              localization={languageConfig}
              options={{
                pageSize: list.pageSize,
                pageSizeOptions: list.pageSizeOptions,
                initialPage: list.page,
                searchText: list.searchText,
                padding: 'dense',
                actionsColumnIndex: -1,
                debounceInterval: 900
              }}/>
          </div>
        }/>
    );
  }
}
// Exposes the registros slice of the store as a prop of the same name.
const mapStateToProps = ({registros}) => ({registros});
// Binds the registros action creators to dispatch so the component can call
// them directly as props.
function mapDispatchToProps(dispatch) {
  const actions = {
    listChangePage: registrosActions.listChangePage,
    listChangeQuantityPerPage: registrosActions.listChangeQuantityPerPage,
    setSearchValue: registrosActions.setSearchValue,
    changeFilterValue: registrosActions.changeFilterValue,
    formEditSubmit: registrosActions.formEditSubmit
  };
  return bindActionCreators(actions, dispatch);
}
export default withRouter(connect(mapStateToProps, mapDispatchToProps)(Registros));
| 33.358025 | 92 | 0.477979 | 3 |
f51ea74045d5aa8e7f16bc66c89fef08f2dc1661 | 677 | lua | Lua | plugin/src/FormatScript/Vendor/Llama/List/zip.lua | howmanysmall/StyluaPlugin | a5f10432a82f68f2d746723007638e6240f759e9 | [
"MIT"
] | null | null | null | plugin/src/FormatScript/Vendor/Llama/List/zip.lua | howmanysmall/StyluaPlugin | a5f10432a82f68f2d746723007638e6240f759e9 | [
"MIT"
] | null | null | null | plugin/src/FormatScript/Vendor/Llama/List/zip.lua | howmanysmall/StyluaPlugin | a5f10432a82f68f2d746723007638e6240f759e9 | [
"MIT"
] | null | null | null | local Debug = require(script.Parent.Parent.Parent.Debug)
local Typer = require(script.Parent.Parent.Parent.Typer)
local Debug_Assert = Debug.Assert
local Typer_Array = Typer.Array
-- Zips any number of arrays into an array of tuples, truncated to the length
-- of the shortest input. Each argument is validated as an array.
local function zip(...)
	local new = {}
	local argCount = select("#", ...)

	if argCount <= 0 then
		return new
	end

	-- Validate each argument once and cache it. The previous version
	-- re-walked the varargs with select() inside the innermost loop, making
	-- the fill phase O(argCount^2 * rows) instead of O(argCount * rows).
	local lists = {}
	for i = 1, argCount do
		lists[i] = Debug_Assert(Typer_Array(select(i, ...)))
	end

	local minLen = #lists[1]
	for i = 2, argCount do
		local len = #lists[i]
		if len < minLen then
			minLen = len
		end
	end

	for i = 1, minLen do
		local tuple = {}
		for j = 1, argCount do
			tuple[j] = lists[j][i]
		end
		new[i] = tuple
	end

	return new
end

return zip
| 17.358974 | 60 | 0.661743 | 3.09375 |
e8e12c70a26b28e73712420fd03691434cb4267c | 13,354 | py | Python | adversarial-transfer-nlp/CW_attack.py | AI-secure/Uncovering-the-Connections-BetweenAdversarial-Transferability-and-Knowledge-Transferability | a2fb10f56618c6d6dd1638967d59c4a83ffa1c05 | [
"CC0-1.0"
] | 8 | 2021-06-18T10:32:27.000Z | 2022-01-16T06:46:25.000Z | adversarial-transfer-nlp/CW_attack.py | AI-secure/Does-Adversairal-Transferability-Indicate-Knowledge-Transferability | a2fb10f56618c6d6dd1638967d59c4a83ffa1c05 | [
"CC0-1.0"
] | 2 | 2021-08-25T15:14:12.000Z | 2022-02-09T23:55:46.000Z | adversarial-transfer-nlp/CW_attack.py | AI-secure/Does-Adversairal-Transferability-Indicate-Knowledge-Transferability | a2fb10f56618c6d6dd1638967d59c4a83ffa1c05 | [
"CC0-1.0"
] | null | null | null | import sys
import torch
import numpy as np
from torch import optim
from util import args
class CarliniL2:
def __init__(self, targeted=True, search_steps=None, max_steps=None, cuda=True, debug=False, num_classes=14):
self.debug = debug
self.targeted = targeted
self.num_classes = num_classes
self.confidence = args.confidence # FIXME need to find a good value for this, 0 value used in paper not doing much...
self.initial_const = args.const # bumped up from default of .01 in reference code
self.binary_search_steps = search_steps or 1
self.repeat = self.binary_search_steps >= 10
self.max_steps = max_steps or args.max_steps
self.abort_early = True
self.cuda = cuda
self.mask = None
self.batch_info = None
self.wv = None
self.seq = None
self.seq_len = None
self.init_rand = False # an experiment, does a random starting point help?
def _compare(self, output, target):
if not isinstance(output, (float, int, np.int64)):
output = np.copy(output)
# if self.targeted:
# output[target] -= self.confidence
# else:
# output[target] += self.confidence
output = np.argmax(output)
if self.targeted:
return output == target
else:
return output != target
def _compare_untargeted(self, output, target):
if not isinstance(output, (float, int, np.int64)):
output = np.copy(output)
# if self.targeted:
# output[target] -= self.confidence
# else:
# output[target] += self.confidence
output = np.argmax(output)
if self.targeted:
return output == target + 1 or output == target - 1
else:
return output != target
def _loss(self, output, target, dist, scale_const):
# compute the probability of the label class versus the maximum other
real = (target * output).sum(1)
other = ((1. - target) * output - target * 10000.).max(1)[0]
if self.targeted:
# if targeted, optimize for making the other class most likely
loss1 = torch.clamp(other - real + self.confidence, min=0.) # equiv to max(..., 0.)
else:
# if non-targeted, optimize for making this class least likely.
loss1 = torch.clamp(real - other + self.confidence, min=0.) # equiv to max(..., 0.)
loss1 = torch.sum(scale_const * loss1)
loss2 = dist.sum()
if args.debug_cw:
print("loss 1:", loss1.item(), " loss 2:", loss2.item())
loss = loss1 + loss2
return loss
def _optimize(self, optimizer, model, input_var, modifier_var, target_var, scale_const_var, input_token=None):
# apply modifier and clamp resulting image to keep bounded from clip_min to clip_max
batch_adv_sent = []
if self.mask is None:
# not word-level attack
input_adv = modifier_var + input_var
output = model(input_adv)
input_adv = model.get_embedding()
input_var = input_token
seqback = model.get_seqback()
batch_adv_sent = seqback.adv_sent.copy()
seqback.adv_sent = []
# input_adv = self.itereated_var = modifier_var + self.itereated_var
else:
# word level attack
input_adv = modifier_var * self.mask + self.itereated_var
# input_adv = modifier_var * self.mask + input_var
for i in range(input_adv.size(0)):
# for batch size
new_word_list = []
add_start = self.batch_info['add_start'][i]
add_end = self.batch_info['add_end'][i]
if add_end < 0:
add_end = len(input_adv[i]) - 1
for j in range(add_start, add_end):
new_placeholder = input_adv[i, j].data
temp_place = new_placeholder.expand_as(self.wv)
new_dist = torch.norm(temp_place - self.wv.data, 2, -1)
_, new_word = torch.min(new_dist, 0)
new_word_list.append(new_word.item())
# input_adv.data[j, i] = self.wv[new_word.item()].data
input_adv.data[i, j] = self.itereated_var.data[i, j] = self.wv[new_word.item()].data
del temp_place
batch_adv_sent.append(new_word_list)
output = model(self.seq, self.batch_info['segment_ids'], self.batch_info['input_mask'], inputs_embeds=input_adv)
if args.debug_cw:
print("output:", batch_adv_sent)
print("input_adv:", input_adv)
print("output:", output)
adv_seq = torch.tensor(self.seq)
for bi, (add_start, add_end) in enumerate(zip(self.batch_info['add_start'], self.batch_info['add_end'])):
adv_seq.data[bi, add_start:add_end] = torch.LongTensor(batch_adv_sent)
print("out:", adv_seq)
print("out embedding:", model.bert.embeddings.word_embeddings(adv_seq))
out = model(adv_seq, self.seq_len)['pred']
print("out:", out)
def reduce_sum(x, keepdim=True):
# silly PyTorch, when will you get proper reducing sums/means?
for a in reversed(range(1, x.dim())):
x = x.sum(a, keepdim=keepdim)
return x
def l1_dist(x, y, keepdim=True):
d = torch.abs(x - y)
return reduce_sum(d, keepdim=keepdim)
def l2_dist(x, y, keepdim=True):
d = (x - y) ** 2
return reduce_sum(d, keepdim=keepdim)
# distance to the original input data
if args.l1:
dist = l1_dist(input_adv, input_var, keepdim=False)
else:
dist = l2_dist(input_adv, input_var, keepdim=False)
loss = self._loss(output, target_var, dist, scale_const_var)
if args.debug_cw:
print(loss)
optimizer.zero_grad()
if input_token is None:
loss.backward()
else:
loss.backward(retain_graph=True)
torch.nn.utils.clip_grad_norm_([modifier_var], args.clip)
# print(modifier_var)
optimizer.step()
# print(modifier_var)
# modifier_var.data -= 2 * modifier_var.grad.data
# modifier_var.grad.data.zero_()
loss_np = loss.item()
dist_np = dist.data.cpu().numpy()
output_np = output.data.cpu().numpy()
input_adv_np = input_adv.data.cpu().numpy()
return loss_np, dist_np, output_np, input_adv_np, batch_adv_sent
def run(self, model, input, target, batch_idx=0, batch_size=None, input_token=None):
if batch_size is None:
batch_size = input.size(0) # ([length, batch_size, nhim])
# set the lower and upper bounds accordingly
lower_bound = np.zeros(batch_size)
scale_const = np.ones(batch_size) * self.initial_const
upper_bound = np.ones(batch_size) * 1e10
# python/numpy placeholders for the overall best l2, label score, and adversarial image
o_best_l2 = [1e10] * batch_size
o_best_score = [-1] * batch_size
o_best_logits = {}
if input_token is None:
best_attack = input.cpu().detach().numpy()
o_best_attack = input.cpu().detach().numpy()
else:
best_attack = input_token.cpu().detach().numpy()
o_best_attack = input_token.cpu().detach().numpy()
self.o_best_sent = {}
self.best_sent = {}
# setup input (image) variable, clamp/scale as necessary
input_var = torch.tensor(input, requires_grad=False)
self.itereated_var = torch.tensor(input_var)
# setup the target variable, we need it to be in one-hot form for the loss function
target_onehot = torch.zeros(target.size() + (self.num_classes,))
# print(target_onehot.size())
if self.cuda:
target_onehot = target_onehot.cuda()
target_onehot.scatter_(1, target.unsqueeze(1), 1.)
target_var = torch.tensor(target_onehot, requires_grad=False)
# setup the modifier variable, this is the variable we are optimizing over
modifier = torch.zeros(input_var.size()).float().cuda()
if self.cuda:
modifier = modifier.cuda()
modifier_var = torch.tensor(modifier, requires_grad=True)
optimizer = optim.Adam([modifier_var], lr=args.lr)
for search_step in range(self.binary_search_steps):
if args.debug_cw:
print('Batch: {0:>3}, search step: {1}'.format(batch_idx, search_step))
print('Const:')
for i, x in enumerate(scale_const):
print(i, x)
best_l2 = [1e10] * batch_size
best_score = [-1] * batch_size
best_logits = {}
# The last iteration (if we run many steps) repeat the search once.
if self.repeat and search_step == self.binary_search_steps - 1:
scale_const = upper_bound
scale_const_tensor = torch.from_numpy(scale_const).float()
if self.cuda:
scale_const_tensor = scale_const_tensor.cuda()
scale_const_var = torch.tensor(scale_const_tensor, requires_grad=False)
for step in range(self.max_steps):
# perform the attack
if self.mask is None:
if args.decreasing_temp:
cur_temp = args.temp - (args.temp - 0.1) / (self.max_steps - 1) * step
model.set_temp(cur_temp)
if args.debug_cw:
print("temp:", cur_temp)
else:
model.set_temp(args.temp)
loss, dist, output, adv_img, adv_sents = self._optimize(
optimizer,
model,
input_var,
modifier_var,
target_var,
scale_const_var,
input_token)
for i in range(batch_size):
target_label = target[i]
output_logits = output[i]
output_label = np.argmax(output_logits)
di = dist[i]
if self.debug:
if step % 100 == 0:
print('{0:>2} dist: {1:.5f}, output: {2:>3}, {3:5.3}, target {4:>3}'.format(
i, di, output_label, output_logits[output_label], target_label))
if di < best_l2[i] and self._compare_untargeted(output_logits, target_label):
# if self._compare(output_logits, target_label):
if self.debug:
print('{0:>2} best step, prev dist: {1:.5f}, new dist: {2:.5f}'.format(
i, best_l2[i], di))
best_l2[i] = di
best_score[i] = output_label
best_logits[i] = output_logits
best_attack[i] = adv_img[i]
self.best_sent[i] = adv_sents[i]
if di < o_best_l2[i] and self._compare(output_logits, target_label):
# if self._compare(output_logits, target_label):
if self.debug:
print('{0:>2} best total, prev dist: {1:.5f}, new dist: {2:.5f}'.format(
i, o_best_l2[i], di))
o_best_l2[i] = di
o_best_score[i] = output_label
o_best_logits[i] = output_logits
o_best_attack[i] = adv_img[i]
self.o_best_sent[i] = adv_sents[i]
sys.stdout.flush()
# end inner step loop
# adjust the constants
batch_failure = 0
batch_success = 0
for i in range(batch_size):
if self._compare(o_best_score[i], target[i]) and o_best_score[i] != -1:
batch_success += 1
if args.debug_cw:
print(self.o_best_sent[i])
print(o_best_score[i])
print(o_best_logits[i])
elif self._compare_untargeted(best_score[i], target[i]) and best_score[i] != -1:
o_best_l2[i] = best_l2[i]
o_best_score[i] = best_score[i]
o_best_attack[i] = best_attack[i]
self.o_best_sent[i] = self.best_sent[i]
if args.debug_cw:
print(self.o_best_sent[i])
print(o_best_score[i])
print(o_best_logits[i])
batch_success += 1
else:
batch_failure += 1
print('Num failures: {0:2d}, num successes: {1:2d}\n'.format(batch_failure, batch_success))
sys.stdout.flush()
# end outer search loop
return o_best_attack
| 44.962963 | 126 | 0.543582 | 3.03125 |
85963afcca8eca6c1bb7832716a10d2260d05acc | 1,474 | js | JavaScript | client/src/components/utils/InfiniteScroll.js | palaumarc/flickr_gallery | e6a194955016fa696610176c897aca1f88ab7acd | [
"MIT"
] | null | null | null | client/src/components/utils/InfiniteScroll.js | palaumarc/flickr_gallery | e6a194955016fa696610176c897aca1f88ab7acd | [
"MIT"
] | null | null | null | client/src/components/utils/InfiniteScroll.js | palaumarc/flickr_gallery | e6a194955016fa696610176c897aca1f88ab7acd | [
"MIT"
] | null | null | null | import React, { Component, Fragment } from 'react';
import PropTypes from 'prop-types';
import Spin from './Spin';
class InfiniteScroll extends Component {
static propTypes = {
loadMore: PropTypes.func.isRequired,
hasMore: PropTypes.bool
}
static defaultProps = {
hasMore: true
}
state = {
isLoading: false
}
onScroll = () => {
const { isLoading } = this.state;
if (isLoading) return;
// Checks that the page has scrolled to the bottom
if (window.innerHeight + document.documentElement.scrollTop === document.documentElement.offsetHeight) {
this.execLoadMore();
}
};
execLoadMore = async () => {
this.setState(prevState => ({...prevState, isLoading: true}));
await this.props.loadMore()
this.setState(prevState => ({...prevState, isLoading: false}));
if (!this.props.hasMore) {
document.removeEventListener('scroll', this.onScroll);
}
}
async componentDidMount() {
document.addEventListener('scroll', this.onScroll);
// Keep loading until available height is filled or there are no more elements
while (document.documentElement.offsetHeight < window.innerHeight && this.props.hasMore) {
await this.execLoadMore();
}
}
render() {
return (
<Fragment>
{this.props.children}
{this.state.isLoading ? <Spin /> : null}
</Fragment>
)
}
}
export default InfiniteScroll; | 24.163934 | 110 | 0.630258 | 3.265625 |
a8dbe5bd998d90d98e3c8c88c85874a068d84095 | 5,205 | rs | Rust | rs/replicated_state/src/page_map/tests.rs | audieleon/ic | 35dd8f93dec82662ed4df35664a9c0be6dbf203a | [
"Apache-2.0"
] | 1 | 2021-07-20T21:44:44.000Z | 2021-07-20T21:44:44.000Z | rs/replicated_state/src/page_map/tests.rs | AmoretAaron/ic | d9202bc7f6d16b2777d4e092ee1b7ad5899aae2b | [
"Apache-2.0"
] | null | null | null | rs/replicated_state/src/page_map/tests.rs | AmoretAaron/ic | d9202bc7f6d16b2777d4e092ee1b7ad5899aae2b | [
"Apache-2.0"
] | 1 | 2022-01-13T13:37:41.000Z | 2022-01-13T13:37:41.000Z | use super::{allocate_pages, checkpoint::Checkpoint, Buffer, PageDelta, PageIndex, PageMap};
use ic_sys::PAGE_SIZE;
use std::fs::OpenOptions;
#[test]
fn can_debug_display_a_page_map() {
let page_map = PageMap::new();
assert_eq!(format!("{:?}", page_map), "{}");
}
#[test]
fn can_create_an_empty_checkpoint() {
let checkpoint = Checkpoint::empty();
let empty_page = vec![0; *PAGE_SIZE];
let first_page = checkpoint.get_page(PageIndex::from(1));
assert_eq!(&empty_page[..], first_page);
}
#[test]
fn empty_page_map_returns_zeroed_pages() {
let page_map = PageMap::new();
let page = page_map.get_page(PageIndex::from(1));
assert_eq!(page.len(), *PAGE_SIZE);
assert!(page.iter().all(|b| *b == 0));
}
#[test]
fn can_update_a_page_map() {
let mut page_map = PageMap::new();
let ones = vec![1u8; *PAGE_SIZE];
let twos = vec![2u8; *PAGE_SIZE];
let delta = PageDelta::from(
&[
(PageIndex::from(1), &ones[..]),
(PageIndex::from(2), &twos[..]),
][..],
);
page_map.update(delta);
for (num, contents) in &[(1, 1), (2, 2), (3, 0)] {
assert!(page_map
.get_page(PageIndex::from(*num))
.iter()
.all(|b| *b == *contents));
}
}
#[test]
fn can_allocate_pages() {
let page = vec![5; *PAGE_SIZE];
let tracked_pages = allocate_pages(&[&page[..]]);
assert_eq!(tracked_pages.len(), 1);
assert_eq!(tracked_pages[0].contents(), page.as_slice());
}
#[test]
fn can_make_page_deltas() {
let page = vec![5u8; *PAGE_SIZE];
let page_delta = PageDelta::from(&[(PageIndex::from(5), &page[..])][..]);
assert_eq!(page_delta.len(), 1);
assert_eq!(page_delta.get_page(PageIndex::from(5)).unwrap(), &page[..])
}
#[test]
fn left_delta_wins_in_extend() {
let page_1 = vec![1u8; *PAGE_SIZE];
let page_2 = vec![2u8; *PAGE_SIZE];
let delta_1 = PageDelta::from(&[(PageIndex::from(1), &page_1[..])][..]);
let delta_2 = PageDelta::from(&[(PageIndex::from(1), &page_2[..])][..]);
let union_12 = delta_1.extend(delta_2);
assert_eq!(union_12.len(), 1);
assert_eq!(union_12.get_page(PageIndex::from(1)).unwrap(), &page_1[..]);
}
#[test]
fn persisted_map_is_equivalent_to_the_original() {
let tmp = tempfile::Builder::new()
.prefix("checkpoints")
.tempdir()
.unwrap();
let heap_file = tmp.path().join("heap");
let page_1 = vec![1u8; *PAGE_SIZE];
let page_3 = vec![3u8; *PAGE_SIZE];
let delta = PageDelta::from(
&[
(PageIndex::from(1), &page_1[..]),
(PageIndex::from(3), &page_3[..]),
][..],
);
let mut original_map = PageMap::default();
original_map.update(delta);
original_map.persist_delta(&heap_file).unwrap();
let persisted_map = PageMap::open(&heap_file).unwrap();
assert_eq!(persisted_map, original_map);
}
#[test]
fn can_persist_and_load_an_empty_page_map() {
let tmp = tempfile::Builder::new()
.prefix("checkpoints")
.tempdir()
.unwrap();
let heap_file = tmp.path().join("heap");
let original_map = PageMap::default();
original_map.persist_delta(&heap_file).unwrap();
let persisted_map = PageMap::open(&heap_file).expect("opening an empty page map must succeed");
assert_eq!(original_map, persisted_map);
}
#[test]
fn returns_an_error_if_file_size_is_not_a_multiple_of_page_size() {
use std::io::Write;
let tmp = tempfile::Builder::new()
.prefix("checkpoints")
.tempdir()
.unwrap();
let heap_file = tmp.path().join("heap");
OpenOptions::new()
.write(true)
.create(true)
.open(&heap_file)
.unwrap()
.write_all(&vec![1; *PAGE_SIZE / 2])
.unwrap();
match PageMap::open(&heap_file) {
Err(err) => assert!(
err.is_invalid_heap_file(),
"Expected invalid heap file error, got {:?}",
err
),
Ok(_) => panic!("Expected a invalid heap file error, got Ok(_)"),
}
}
#[test]
fn can_use_buffer_to_modify_page_map() {
let page_1 = vec![1u8; *PAGE_SIZE];
let page_3 = vec![3u8; *PAGE_SIZE];
let delta = PageDelta::from(
&[
(PageIndex::from(1), &page_1[..]),
(PageIndex::from(3), &page_3[..]),
][..],
);
let mut page_map = PageMap::default();
page_map.update(delta);
let n = 4 * *PAGE_SIZE;
let mut vec_buf = vec![0u8; n];
vec_buf[*PAGE_SIZE..2 * *PAGE_SIZE].copy_from_slice(&page_1);
vec_buf[3 * *PAGE_SIZE..4 * *PAGE_SIZE].copy_from_slice(&page_3);
let mut buf = Buffer::new(page_map);
let mut read_buf = vec![0u8; n];
buf.read(&mut read_buf[..], 0);
assert_eq!(read_buf, vec_buf);
for offset in 0..n {
let mut len = 1;
while (offset + len) < n {
let b = ((offset + len) % 15) as u8;
for dst in vec_buf.iter_mut().skip(offset).take(len) {
*dst = b;
}
buf.write(&vec_buf[offset..offset + len], offset);
buf.read(&mut read_buf[..], 0);
assert_eq!(read_buf, vec_buf);
len *= 2;
}
}
}
| 27.68617 | 99 | 0.578866 | 3.09375 |
71e68956d75edcf9698c155dfeb03a06cc6ecaee | 1,472 | kt | Kotlin | modules/drawing/src/main/kotlin/silentorb/mythic/drawing/drawText.kt | silentorb/mythic-kotlin | 74462fcba9e7805dddec1bfcb3431665df7d0dee | [
"MIT"
] | 1 | 2020-06-24T02:01:23.000Z | 2020-06-24T02:01:23.000Z | modules/drawing/src/main/kotlin/silentorb/mythic/drawing/drawText.kt | silentorb/mythic-kotlin | 74462fcba9e7805dddec1bfcb3431665df7d0dee | [
"MIT"
] | 5 | 2019-12-24T17:14:43.000Z | 2021-01-26T22:33:47.000Z | modules/drawing/src/main/kotlin/silentorb/mythic/drawing/drawText.kt | silentorb/mythic-kotlin | 74462fcba9e7805dddec1bfcb3431665df7d0dee | [
"MIT"
] | null | null | null | package silentorb.mythic.drawing
import silentorb.mythic.glowing.DrawMethod
import silentorb.mythic.glowing.VertexSchema
import silentorb.mythic.glowing.globalState
import silentorb.mythic.spatial.Matrix
import silentorb.mythic.spatial.Vector2
import silentorb.mythic.typography.TextConfiguration
import silentorb.mythic.typography.TextPackage
import silentorb.mythic.typography.prepareText
import silentorb.mythic.spatial.Vector2i
import org.lwjgl.opengl.GL11.*
fun getUnitScaling(dimensions: Vector2i) =
if (dimensions.x < dimensions.y)
Vector2(1f, dimensions.x.toFloat() / dimensions.y)
else
Vector2(dimensions.y.toFloat() / dimensions.x, 1f)
fun prepareTextMatrix(pixelsToScalar: Matrix, position: Vector2) =
Matrix.identity
.mul(pixelsToScalar)
.translate(position.x, position.y, 0f)
fun renderText(config: TextConfiguration, effect: ColoredImageShader, textPackage: TextPackage, transform: Matrix) {
effect.activate(transform, config.style.color, config.style.font.texture)
globalState.blendEnabled = true
globalState.blendFunction = Pair(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
textPackage.mesh.draw(DrawMethod.triangleFan)
}
fun drawTextRaw(config: TextConfiguration, effect: ColoredImageShader, vertexSchema: VertexSchema, transform: Matrix) {
val textPackage = prepareText(config, vertexSchema)
if (textPackage != null) {
renderText(config, effect, textPackage, transform)
textPackage.mesh.dispose()
}
}
| 36.8 | 119 | 0.796196 | 3.109375 |
f6743e007e18ef6144696c5a28916071fb960531 | 1,915 | kt | Kotlin | src/main/kotlin/ui/util/swing/Tables.kt | marshallbrain/pulsar-java | fed43a0164421b27056045446f07982e0313452d | [
"MIT"
] | null | null | null | src/main/kotlin/ui/util/swing/Tables.kt | marshallbrain/pulsar-java | fed43a0164421b27056045446f07982e0313452d | [
"MIT"
] | null | null | null | src/main/kotlin/ui/util/swing/Tables.kt | marshallbrain/pulsar-java | fed43a0164421b27056045446f07982e0313452d | [
"MIT"
] | null | null | null | package ui.util.swing
import java.awt.Dimension
import javax.swing.BorderFactory
import javax.swing.JScrollPane
import javax.swing.JTable
import javax.swing.ListSelectionModel
import javax.swing.border.Border
import javax.swing.table.DefaultTableModel
fun createScrollTable(
table: JTable,
maxVisibleRows: Int = table.rowCount,
border: Border = BorderFactory.createEmptyBorder()
) : JScrollPane {
val scrollPane = object : ContinuesScrollPane(table) {
override fun getPreferredSize(): Dimension {
val insets = border.getBorderInsets(this)
return Dimension(0, (table.rowHeight)*maxVisibleRows) +
columnHeader.preferredSize +
Dimension(insets.left + insets.right, insets.top + insets.bottom)
}
override fun getMinimumSize(): Dimension = preferredSize
override fun getMaximumSize(): Dimension = preferredSize
init {
addMouseWheelListener(ContinuesMouseWheelListener())
}
}
scrollPane.border = border
return scrollPane
}
fun createTable(
vararg columnNames: String,
data: MutableList<*> = emptyList<String>().toMutableList()
) : JTable {
val formattedData = getFormattedData(data, *columnNames)
val table = object : JTable(DefaultTableModel(formattedData, columnNames)) {
override fun isCellEditable(row: Int, column: Int): Boolean = false
}
table.setSelectionMode(ListSelectionModel.SINGLE_SELECTION)
table.cellSelectionEnabled = false
table.rowSelectionAllowed = true
table.fillsViewportHeight = true
return table
}
private fun getFormattedData(
data: MutableList<*>,
vararg columnNames: String
) : Array<Array<*>> {
val colNum = columnNames.size
val rowNum = data.size / colNum
return List(rowNum) { row ->
List(colNum) { col ->
data[row * colNum + col]
}.toTypedArray()
}.toTypedArray()
}
private operator fun Dimension.plus(i : Dimension) : Dimension {
return Dimension(width + i.width, height + i.height)
}
| 23.641975 | 77 | 0.746214 | 3.140625 |
124f49977e548fd065352dda1813f453f87675ba | 5,700 | h | C | src/backgroundprocess/app.h | Skycoder42/QBackgroundProcess | 9681aa736729ce1b7985e8664e0f05729235e0c4 | [
"BSD-3-Clause"
] | 10 | 2017-08-17T16:34:03.000Z | 2021-02-16T17:42:30.000Z | src/backgroundprocess/app.h | Skycoder42/QBackgroundProcess | 9681aa736729ce1b7985e8664e0f05729235e0c4 | [
"BSD-3-Clause"
] | null | null | null | src/backgroundprocess/app.h | Skycoder42/QBackgroundProcess | 9681aa736729ce1b7985e8664e0f05729235e0c4 | [
"BSD-3-Clause"
] | 4 | 2018-08-27T06:15:11.000Z | 2021-03-08T10:10:34.000Z | #ifndef QTBACKGROUNDPROCESS_APP_H
#define QTBACKGROUNDPROCESS_APP_H
#include "QtBackgroundProcess/qtbackgroundprocess_global.h"
#include <QtCore/qcommandlineparser.h>
#include <QtCore/qcoreapplication.h>
#include <QtCore/qexception.h>
#include <functional>
//! The Namespace containing all classes of the QtBackgroundProcess module
namespace QtBackgroundProcess {
class Terminal;
//! Will be thrown, if you perform an operation, that is not allowed in running state
class Q_BACKGROUNDPROCESS_EXPORT NotAllowedInRunningStateException : public QException
{
public:
NotAllowedInRunningStateException();
//! @inherit{std::exception::what}
const char *what() const noexcept override;
//! @inherit{QException::raise}
void raise() const override;
//! @inherit{QException::clone}
QException *clone() const override;
};
class AppPrivate;
//! The background process application. The main class of QtBackgroundProcess
class Q_BACKGROUNDPROCESS_EXPORT App : public QCoreApplication
{
Q_OBJECT
friend class AppPrivate;
//! The current id of the singleton instance of the master process
Q_PROPERTY(QString instanceID READ instanceID WRITE setInstanceID RESET createDefaultInstanceID)
//! Specify whether the app should be a systemwide or userwide single instance
Q_PROPERTY(bool globalInstance READ globalInstance WRITE setGlobalInstance)
//! Specifies, whether the master should forward debug output to all terminals
Q_PROPERTY(bool forwardMasterLog READ forwardMasterLog WRITE setForwardMasterLog)
//! If true, the master process will always be started, not only with "start"
Q_PROPERTY(bool autoStartMaster READ autoStartMaster WRITE setAutoStartMaster)
//! If true, "start" commands will be ignored, if the master is already running
Q_PROPERTY(bool ignoreMultiStarts READ ignoreMultiStarts WRITE setIgnoreMultiStarts)
//! If true, the master process will automatically delete terminals that have been disconnected
Q_PROPERTY(bool autoDeleteTerminals READ autoDeleteTerminals WRITE setAutoDeleteTerminals)
//! If true, the master process will automatically close terminals after it received the parameters
Q_PROPERTY(bool autoKillTerminals READ autoKillTerminals WRITE setAutoKillTerminals)
//! Holds a list of all currently connected terminals
Q_PROPERTY(QList<Terminal*> connectedTerminals READ connectedTerminals NOTIFY connectedTerminalsChanged)
public:
//! Creates a new app with it's arguments
App(int &argc, char **argv, int flags = ApplicationFlags);
//! Destructor
~App();
//! @readAcFn{App::instanceID}
QString instanceID() const;
//! @readAcFn{App::globalInstance}
bool globalInstance() const;
//! @readAcFn{App::forwardMasterLog}
bool forwardMasterLog() const;
//! @readAcFn{App::autoStartMaster}
bool autoStartMaster() const;
//! @readAcFn{App::ignoreMultiStarts}
bool ignoreMultiStarts() const;
//! @readAcFn{App::autoDeleteTerminals}
bool autoDeleteTerminals() const;
//! @readAcFn{App::autoKillTerminals}
bool autoKillTerminals() const;
//! Sets the function to be called for the creation of the parser (Instead of overriding)
void setParserSetupFunction(const std::function<void(QCommandLineParser &)> &function);
//! Sets the function to be called to startup the application (Instead of overriding)
void setStartupFunction(const std::function<int (const QCommandLineParser &)> &function);
//! Sets the function to be called to handle shutdown requests (Instead of overriding)
void setShutdownRequestFunction(const std::function<bool(const QCommandLineParser &, int&)> &function);
//! Sets the function to be called to handle shutdown requests (Instead of overriding)
void setShutdownRequestFunction(const std::function<bool(Terminal*, int&)> &function);
//! Executes the application event loop
int exec();
//! @readAcFn{App::connectedTerminals}
QList<Terminal*> connectedTerminals() const;
public Q_SLOTS:
//! @resetAcFn{App::instanceID}
void createDefaultInstanceID(bool overwrite = true);
//! @writeAcFn{App::instanceID}
void setInstanceID(QString instanceID, bool useAsSeed = true);
//! @writeAcFn{App::globalInstance}
void setGlobalInstance(bool globalInstance);
//! @writeAcFn{App::forwardMasterLog}
void setForwardMasterLog(bool forwardMasterLog);
//! @writeAcFn{App::autoStartMaster}
void setAutoStartMaster(bool autoStartMaster);
//! @writeAcFn{App::ignoreMultiStarts}
void setIgnoreMultiStarts(bool ignoreMultiStarts);
//! @writeAcFn{App::autoDeleteTerminals}
void setAutoDeleteTerminals(bool autoDeleteTerminals, bool changeCurrent = false);
//! @writeAcFn{App::autoKillTerminals}
void setAutoKillTerminals(bool autoKillTerminals, bool killCurrent = false);
Q_SIGNALS:
//! Will be emitted when a new terminal has connected to the master
void newTerminalConnected(QtBackgroundProcess::Terminal *terminal, QPrivateSignal);
//! Will be emitted when a new terminal sent arguments to the master
void commandReceived(QSharedPointer<QCommandLineParser> parser, bool isStarter, QPrivateSignal);
//! @notifyAcFn{App::connectedTerminals}
void connectedTerminalsChanged(QList<Terminal*> connectedTerminals, QPrivateSignal);
protected:
//! Sets up the parser to parse commands and arguments
virtual void setupParser(QCommandLineParser &parser, bool useShortOptions = true);
//! Is called as initialization function of the master process
virtual int startupApp(const QCommandLineParser &parser);
//! Gets called when a terminal requests a shutdown of the master
virtual bool requestAppShutdown(Terminal *terminal, int &exitCode);
private:
AppPrivate* d;
};
}
#undef qApp
#define qApp static_cast<QtBackgroundProcess::App*>(QCoreApplication::instance())
#endif // QTBACKGROUNDPROCESS_APP_H
| 41.605839 | 105 | 0.792982 | 3.140625 |
f037e3032ce0ce519e6e32edcbecb11a4130e35e | 1,595 | js | JavaScript | www/js/search_worker.js | xqqy/myfuzhong | 25e7f6e79b65a4e2c550f4b8d43a35f5030fcf1b | [
"Apache-2.0"
] | 1 | 2018-01-27T15:48:01.000Z | 2018-01-27T15:48:01.000Z | www/js/search_worker.js | xqqy/myfuzhong | 25e7f6e79b65a4e2c550f4b8d43a35f5030fcf1b | [
"Apache-2.0"
] | null | null | null | www/js/search_worker.js | xqqy/myfuzhong | 25e7f6e79b65a4e2c550f4b8d43a35f5030fcf1b | [
"Apache-2.0"
] | null | null | null | var list,all;
function dialogAlert(message, title, buttonname, callback) { //通知服务
title = title || "错误";
buttonname = buttonname || "确定";
callback = callback || function () {
return;
}
if(navigator.notification){
navigator.notification.alert(message, callback, title, buttonname);
}else{
alert(message);
}
}
function get(e) {//获取列表
var data=new FormData;
data.append("UID",e.data[2]);
data.append("TOKEN",e.data[3]);
var xhr = new XMLHttpRequest;
xhr.open("post", e.data[1], true);
xhr.onreadystatechange = function () {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
if (xhr.responseText.split("/meow/")[0] == "done") {
list = xhr.responseText.split("/meow/");
all = list.length
onmsg(e)
} else {
dialogAlert(xhr.responseText);
close();
}
} else {
dialogAlert("网络不能连接") + xhr.status;
close();
}
}
}
xhr.send(data);
}
onmessage = onmsg;
function onmsg(e) {
if(!list){
get(e);
return;
}
var now = 1,
ret = "";
while (now < all) {
if (list[now+2].toUpperCase().indexOf(document.getElementById("search").value.toUpperCase()) > -1) {
ret+='<a href="#" class="collection-item" onclick="ati('+"'"+list[now+1]+"'"+','+"'"+list[now]+"'"+')">'+list[now+2]+'</a>';
}
now += 3;
}
postMessage(ret);
}; | 29 | 136 | 0.485893 | 3.140625 |
0bc6ef3ed241becc1afee395cbae4a3b843d3a02 | 1,342 | js | JavaScript | app/components/layout/Header.js | yarikgenza/trello | a95f9418757aedd696ee24ee9c4be4fd4ab01a67 | [
"MIT"
] | 1 | 2017-01-07T23:57:15.000Z | 2017-01-07T23:57:15.000Z | app/components/layout/Header.js | yarikgenza/trello | a95f9418757aedd696ee24ee9c4be4fd4ab01a67 | [
"MIT"
] | null | null | null | app/components/layout/Header.js | yarikgenza/trello | a95f9418757aedd696ee24ee9c4be4fd4ab01a67 | [
"MIT"
] | null | null | null | import React, {Component} from 'react';
import {Button} from 'react-bootstrap';
export default class Header extends Component {
constructor() {
super();
this.state = {
text: ''
}
}
componentDidMount() {
const token = localStorage.getItem('token');
const {text} = this.state;
if (token) {
fetch('/api/user', {
method: 'get',
headers: {
'Content-type': 'application/json',
authorization: token
}
})
.then(res => res.json())
.then((res) => {
this.setState({
text: res.login
})
})
.catch((e) => { console.log(e) })
}
}
logOut() {
localStorage.removeItem('token');
window.location.href = '/'
}
render() {
const token = localStorage.getItem('token');
if (token) {
return (
<div className="header">
<h1 className="header_positioned">Trello</h1>
<div className="logOut">
<div className="userLogin">
<p>{this.state.text}</p>
</div>
<Button onClick={() => this.logOut()} bsStyle="info">Log out</Button>
</div>
</div>
)
} else {
return (
<div className="header">
<h1>Trello</h1>
</div>
)
}
}
}
| 20.333333 | 81 | 0.481371 | 3.046875 |
11c17cffca45808b3ff136341eb793f249307f98 | 2,384 | rs | Rust | src/pointer.rs | sim82/game1 | e4cd03c91421dea3234eaf7b4f9fabbae30312be | [
"MIT"
] | null | null | null | src/pointer.rs | sim82/game1 | e4cd03c91421dea3234eaf7b4f9fabbae30312be | [
"MIT"
] | null | null | null | src/pointer.rs | sim82/game1 | e4cd03c91421dea3234eaf7b4f9fabbae30312be | [
"MIT"
] | null | null | null | use bevy::{
input::mouse::{MouseButtonInput, MouseMotion},
prelude::*,
};
pub struct MouseGrabState {
pub shall_grab: bool,
known_state: bool,
}
fn mouse_grab_system(
mut grab_state: ResMut<MouseGrabState>,
mut windows: ResMut<Windows>,
keyboard_input: Res<Input<KeyCode>>,
) {
let update = if keyboard_input.just_pressed(KeyCode::Grave) {
grab_state.shall_grab = !grab_state.shall_grab;
true
} else {
false
};
if update || !grab_state.known_state {
grab_state.known_state = true;
let window = windows.get_primary_mut().unwrap();
if window.cursor_locked() != grab_state.shall_grab {
window.set_cursor_lock_mode(grab_state.shall_grab);
window.set_cursor_visibility(!grab_state.shall_grab);
}
}
}
#[derive(Default)]
pub struct PrimaryPointerPos {
pub pos: Vec3,
}
#[derive(Debug)]
pub struct ClickEvent {
pub pos: Vec3,
}
#[derive(Component)]
pub struct MousePointerFlag;
pub fn mouse_input_system(
mut query: Query<&mut Transform, With<MousePointerFlag>>,
mut mouse_button_input_events: EventReader<MouseButtonInput>,
mut mouse_motion_events: EventReader<MouseMotion>,
mut primary_pointer: ResMut<PrimaryPointerPos>,
grab_state: Res<MouseGrabState>,
mut click_events: EventWriter<ClickEvent>,
) {
if !grab_state.shall_grab {
return;
}
for mut transform in query.iter_mut() {
for event in mouse_motion_events.iter() {
let d = Vec3::new(event.delta.x, -event.delta.y, 0.0);
transform.translation += d * 0.5;
}
primary_pointer.pos = transform.translation;
}
for event in mouse_button_input_events.iter() {
if event.button == MouseButton::Left && event.state.is_pressed() {
// info!("pressed");
click_events.send(ClickEvent {
pos: primary_pointer.pos,
})
}
}
}
pub struct PointerPlugin;
impl Plugin for PointerPlugin {
fn build(&self, app: &mut App) {
app.add_system(mouse_input_system)
.add_system(mouse_grab_system)
.init_resource::<PrimaryPointerPos>()
.insert_resource(MouseGrabState {
shall_grab: true,
known_state: false,
})
.add_event::<ClickEvent>();
}
}
| 26.786517 | 74 | 0.630453 | 3.078125 |
18bf1a0139d71e8ef2b676cb4ba44ba527cc0964 | 1,872 | sql | SQL | Behavior_Analysis_Setup/StudentTimeAndPoints.sql | bbrub49/SQL-Code-Examples | 8c4be22f31e52b906d324174a93486a1ace0c4eb | [
"MIT"
] | null | null | null | Behavior_Analysis_Setup/StudentTimeAndPoints.sql | bbrub49/SQL-Code-Examples | 8c4be22f31e52b906d324174a93486a1ace0c4eb | [
"MIT"
] | null | null | null | Behavior_Analysis_Setup/StudentTimeAndPoints.sql | bbrub49/SQL-Code-Examples | 8c4be22f31e52b906d324174a93486a1ace0c4eb | [
"MIT"
] | null | null | null | /*
* Establishes the reason and minutes a student is absent
* if the absence is excused in order to remove those
* minutes from the students total time and total points
* possible to earn that day
*/
DECLARE @yeartype as varchar(15)
SET @yeartype = (SELECT [Type] FROM NSSEOPulse.dbo.ZM_TEST_BB_SchoolYear WHERE [Description] = 'Current')
IF @yeartype = 'Summer'
SELECT ArchYear, dp.Site, dp.StudentID, StartDate, EndDate, RngS, RngE, ServiceCode, ProgramDescription, EntryValue,
GroupNumber, StartWk, EndWk, DayDate, DtRng, TotalDays,
(CAST(MinPossible as float) - CAST(MinPresent as float)) as MinutesAbsent,
'ESY' as ReasonCode, 0.00 as ActualAbsence
FROM ZZ_TEST_BB_20DayDataPrep dp LEFT OUTER JOIN (
SELECT SIte, SchoolYear, StudentID, AtnDate, SubjSec, MinPresent, MinPossible, ATNSpecialCode, ATNSpecialComment
FROM ZZ_TEST_ATNSpecialAdditional
WHERE SIte = 'TR'
) as ma ON dp.StudentID = ma.StudentID AND dp.DayDate = CONVERT(Date,ma.AtnDate)
ELSE
SELECT ArchYear, dp.Site, dp.StudentID, StartDate, EndDate, RngS, RngE, ServiceCode, ProgramDescription, EntryValue,
GroupNumber, StartWk, EndWk, DayDate, DtRng, TotalDays,
CASE
WHEN TotalMinutesAbs IS NULL THEN 0
ELSE TotalMinutesAbs
END AS ActualAbsence,
CASE
WHEN (ReasonCode IN ('E','L','X','M')) OR (ReasonCode = 'G' AND ServiceCode IN ('1','2','3')) THEN TotalMinutesAbs
ELSE 0
END AS MinutesAbsent,
ReasonCode
FROM ZZ_TEST_BB_20DayDataPrep dp LEFT OUTER JOIN (
SELECT DISTINCT PrimarySite, ma.StudentID, ma.TotalMinutesAbs, Minutespossible, ma.AttendanceDate, ReasonCode
FROM ZZ_TEST_ATNStudentDetail ma INNER JOIN ZZ_TEST_ATNAttendanceMarks am ON ma.StudentID = am.StudentID AND ma.AttendanceDate = am.AbsentDate
WHERE PrimarySite = 'TR'
) as ma ON dp.StudentID = ma.StudentID AND CONVERT(Date,dp.DayDate) = CONVERT(Date,ma.AttendanceDate) | 42.545455 | 143 | 0.76015 | 3.203125 |
f07411bf6835efa66845aedc9d0915e9f4597ba2 | 1,138 | py | Python | UnitTests/FullAtomModel/CoordsTransform/test_forward.py | johahi/TorchProteinLibrary | b1fc9faa9b51c4550e5f754d075766ba38e0f8a0 | [
"MIT"
] | null | null | null | UnitTests/FullAtomModel/CoordsTransform/test_forward.py | johahi/TorchProteinLibrary | b1fc9faa9b51c4550e5f754d075766ba38e0f8a0 | [
"MIT"
] | null | null | null | UnitTests/FullAtomModel/CoordsTransform/test_forward.py | johahi/TorchProteinLibrary | b1fc9faa9b51c4550e5f754d075766ba38e0f8a0 | [
"MIT"
] | null | null | null | import sys
import os
import torch
import numpy as np
from TorchProteinLibrary.FullAtomModel.CoordsTransform import CoordsTranslate, getRandomTranslation, getBBox, CoordsRotate, getRandomRotation
from TorchProteinLibrary.FullAtomModel import Angles2Coords, Coords2TypedCoords
def test_translation(coords, num_atoms):
translate = CoordsTranslate()
a,b = getBBox(coords, num_atoms)
center = (a+b)*0.5
print (center)
centered_coords = translate(coords, -center, num_atoms)
a,b = getBBox(centered_coords, num_atoms)
center = (a+b)*0.5
print(center)
def test_rotation(coords, num_atoms):
batch_size = num_atoms.size(0)
R = getRandomRotation(batch_size)
rotate = CoordsRotate()
rotated = rotate(coords, R, num_atoms)
print(rotated)
if __name__=='__main__':
sequences = ['GGGGGG', 'GGAARRRRRRRRR']
angles = torch.zeros(2, 7,len(sequences[1]), dtype=torch.double)
angles[:,0,:] = -1.047
angles[:,1,:] = -0.698
angles[:,2:,:] = 110.4*np.pi/180.0
a2c = Angles2Coords()
protein, res_names, atom_names, num_atoms = a2c(angles, sequences)
test_translation(protein, num_atoms)
test_rotation(protein, num_atoms)
| 25.863636 | 141 | 0.748682 | 3.203125 |
a179af87613d1e41c4a92bf0b289fa58d5086d23 | 2,360 | go | Go | server/server.go | asommer70/evergreeen-new-releases | ebf1a02a2b199d68a138ad4b5e3b76ae63aa272a | [
"MIT"
] | null | null | null | server/server.go | asommer70/evergreeen-new-releases | ebf1a02a2b199d68a138ad4b5e3b76ae63aa272a | [
"MIT"
] | null | null | null | server/server.go | asommer70/evergreeen-new-releases | ebf1a02a2b199d68a138ad4b5e3b76ae63aa272a | [
"MIT"
] | null | null | null | package main
import (
"fmt"
"github.com/anaskhan96/soup"
"net/http"
//"io/ioutil"
"encoding/json"
"os"
"strconv"
"strings"
"time"
)
type TitleSearchResult struct {
Query string
Results []TitleResult
}
type TitleResult struct {
Name, Description, Url []string
}
func main() {
// titles := getSearchPage()
// fmt.Println("titles:", titles)
// for _, title := range titles {
// // TODO:as create Go Routines for getting information for each title.
// }
getMovieInfo()
}
func getSearchPage() []string {
base_url := "http://nccardinal.org"
library_number := 132
search_url := "/eg/opac/results?bool=and&qtype=keyword&contains=contains&query=&bool=and&qtype=title&contains=contains&query=&bool=and&qtype=author&contains=contains&query=&_adv=1&detail_record_view=0&fi%3Aitem_type=g&fi%3Avr_format=v&locg=" + strconv.Itoa(library_number) + "&pubdate=is&date1=&date2=&sort=pubdate.descending"
url := base_url + search_url
//fmt.Println("url:", url)
resp, err := soup.Get(url)
if err != nil {
os.Exit(1)
}
doc := soup.HTMLParse(resp)
links := doc.FindAll("a", "class", "record_title search_link")
//fmt.Println(links)
// TODO:as also get the library link for each movie.
titles := make([]string, len(links))
for _, link := range links {
// fmt.Println(link.Text(), "| Link :", link.Attrs()["href"])
//fmt.Println(strings.TrimSpace(strings.Split(link.Text(), "[videorecording]")[0]))
titles = append(titles, strings.TrimSpace(strings.Split(link.Text(), "[videorecording]")[0]))
}
return titles
}
func getMovieInfo() {
title := "The Post"
searchUrl := "https://en.wikipedia.org/w/api.php?action=opensearch&format=json&search="
//searchRes := TitleSearchResult{}
var model []interface{}
getJson(searchUrl + title, &model)
}
var myClient = &http.Client{Timeout: 10 * time.Second}
func getJson(url string, target interface{}) error {
r, err := myClient.Get(url)
if err != nil {
return err
}
defer r.Body.Close()
if err := json.Unmarshal([]byte(r.Body), target); err != nil {
fmt.Println("err:", err)
}
// fmt.Println("searchRes:", model)
for _, x := range model {
switch value := x.(type) {
case string:
fmt.Println(value)
case []interface{}:
for _, v := range value {
fmt.Println(v.(string))
}
}
}
return json.NewDecoder(r.Body).Decode(target)
}
| 23.6 | 327 | 0.665254 | 3.09375 |
20f45f70e14928808cae9d38bd843ce4c03dd39d | 2,739 | lua | Lua | rot/rot/noise/simplex.lua | LJNIC/Collaborogue | 9c333e790f043087045d9236ccde0e496ebfc96b | [
"MIT"
] | 4 | 2020-11-09T23:33:48.000Z | 2021-04-19T23:14:50.000Z | rot/rot/noise/simplex.lua | LJNIC/Collaborogue | 9c333e790f043087045d9236ccde0e496ebfc96b | [
"MIT"
] | 1 | 2021-11-13T15:53:53.000Z | 2021-11-13T15:53:53.000Z | rot/rot/noise/simplex.lua | LJNIC/Collaborogue | 9c333e790f043087045d9236ccde0e496ebfc96b | [
"MIT"
] | 2 | 2021-11-11T03:25:30.000Z | 2022-02-13T13:05:00.000Z | --- Simplex Noise Generator.
-- Based on a simple 2d implementation of simplex noise by Ondrej Zara
-- Which is based on a speed-improved simplex noise algorithm for 2D, 3D and 4D in Java.
-- Which is based on example code by Stefan Gustavson ([email protected]).
-- With Optimisations by Peter Eastman ([email protected]).
-- Better rank ordering method by Stefan Gustavson in 2012.
-- @module ROT.Noise.Simplex
local ROT = require((...):gsub(('.[^./\\]*'):rep(2) .. '$', ''))
local Simplex = ROT.Noise:extend("Simplex")
--- Constructor.
-- 2D simplex noise generator.
-- @tparam int gradients The random values for the noise.
function Simplex:init(gradients)
self._F2 = .5 * (math.sqrt(3) - 1)
self._G2 = (3 - math.sqrt(3)) / 6
self._gradients = {
{ 0, - 1},
{ 1, - 1},
{ 1, 0},
{ 1, 1},
{ 0, 1},
{ - 1, 1},
{ - 1, 0},
{ - 1, - 1}
}
local permutations = {}
local count = gradients and gradients or 256
for i = 1, count do
table.insert(permutations, i)
end
permutations = table.randomize(permutations)
self._perms = {}
self._indexes = {}
for i = 1, 2 * count do
table.insert(self._perms, permutations[i%count + 1])
table.insert(self._indexes, self._perms[i] % #self._gradients + 1)
end
end
--- Get noise for a cell
-- Iterate over this function to retrieve noise values
-- @tparam int xin x-position of noise value
-- @tparam int yin y-position of noise value
function Simplex:get(xin, yin)
local perms = self._perms
local indexes = self._indexes
local count = #perms / 2
local G2 = self._G2
local n0, n1, n2, gi = 0, 0, 0
local s = (xin + yin) * self._F2
local i = math.floor(xin + s)
local j = math.floor(yin + s)
local t = (i + j) * G2
local X0 = i - t
local Y0 = j - t
local x0 = xin - X0
local y0 = yin - Y0
local i1, j1
if x0 > y0 then
i1 = 1
j1 = 0
else
i1 = 0
j1 = 1
end
local x1 = x0 - i1 + G2
local y1 = y0 - j1 + G2
local x2 = x0 - 1 + 2 * G2
local y2 = y0 - 1 + 2 * G2
local ii = i%count + 1
local jj = j%count + 1
local t0 = .5 - x0 * x0 - y0 * y0
if t0 >= 0 then
t0 = t0 * t0
gi = indexes[ii + perms[jj]]
local grad = self._gradients[gi]
n0 = t0 * t0 * (grad[1] * x0 + grad[2] * y0)
end
local t1 = .5 - x1 * x1 - y1 * y1
if t1 >= 0 then
t1 = t1 * t1
gi = indexes[ii + i1 + perms[jj + j1]]
local grad = self._gradients[gi]
n1 = t1 * t1 * (grad[1] * x1 + grad[2] * y1)
end
local t2 = .5 - x2 * x2 - y2 * y2
if t2 >= 0 then
t2 = t2 * t2
gi = indexes[ii + 1 + perms[jj + 1]]
local grad = self._gradients[gi]
n2 = t2 * t2 * (grad[1] * x2 + grad[2] * y2)
end
return 70 * (n0 + n1 + n2)
end
return Simplex
| 25.12844 | 88 | 0.586345 | 3.328125 |
330bc2029c1246f778fe532317958ef2c30db80a | 10,719 | py | Python | touca/_case.py | trytouca/touca-python | dab4bb6760a173952b63ea14fd4bc30c3877744e | [
"Apache-2.0"
] | 11 | 2021-06-29T04:51:28.000Z | 2022-03-22T05:58:44.000Z | touca/_case.py | trytouca/touca-python | dab4bb6760a173952b63ea14fd4bc30c3877744e | [
"Apache-2.0"
] | null | null | null | touca/_case.py | trytouca/touca-python | dab4bb6760a173952b63ea14fd4bc30c3877744e | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Touca, Inc. Subject to Apache-2.0 License.
from ._types import IntegerType, VectorType, ToucaType
from datetime import datetime, timedelta
from enum import Enum
from typing import Dict, Tuple
class ResultCategory(Enum):
    """
    Distinguishes the two kinds of entries captured for a test case:
    regular test results ("checks") and assertions.
    """

    Check = 1
    Assert = 2
class ResultEntry:
    """
    Pairs a captured ``ToucaType`` value with the category
    (check or assertion) under which it was logged.

    We intentionally avoid ``@dataclass`` so that the core library has
    no dependency on the ``dataclasses`` module. This may change in the
    future.
    """

    def __init__(self, typ: ResultCategory, val: ToucaType):
        """
        Creates an entry that stores the given value under the given category.

        :param typ: category this entry belongs to
        :param val: captured value of the entry
        """
        self.typ, self.val = typ, val
class Case:
    """
    Holds everything captured for a single test case: descriptive
    metadata, logged checks and assertions, and performance timers.
    """

    def __init__(self, **kwargs):
        """
        Creates a test case whose metadata (e.g. team, suite, version,
        name) is taken from the given keyword arguments.
        """
        # Descriptive fields reported alongside the captured data.
        self._meta = kwargs
        # Captured checks and assertions, keyed by result name.
        self._results: Dict[str, ResultEntry] = {}
        # Start and stop timestamps of performance measurements.
        self._tics: Dict[str, datetime] = {}
        self._tocs: Dict[str, datetime] = {}
def check(self, key: str, value: ToucaType):
"""
Logs a given value as a test result for the declared test case
and associates it with the specified key.
:param key: name to be associated with the logged test result
:param value: value to be logged as a test result
"""
self._results[key] = ResultEntry(typ=ResultCategory.Check, val=value)
def assume(self, key: str, value: ToucaType):
"""
Logs a given value as an assertion for the declared test case
and associates it with the specified key.
:param key: name to be associated with the logged test result
:param value: value to be logged as a test result
"""
self._results[key] = ResultEntry(typ=ResultCategory.Assert, val=value)
def add_array_element(self, key: str, value: ToucaType):
"""
Adds a given value to a list of results for the declared
test case which is associated with the specified key.
Could be considered as a helper utility function.
This method is particularly helpful to log a list of items as they
are found:
.. code-block:: python
for number in numbers:
if is_prime(number):
touca.add_array_element("prime numbers", number)
touca.add_hit_count("number of primes")
This pattern can be considered as a syntactic sugar for the following
alternative:
.. code-block:: python
primes = []
for number in numbers:
if is_prime(number):
primes.append(number)
if primes:
touca.check("prime numbers", primes)
touca.check("number of primes", len(primes))
The items added to the list are not required to be of the same type.
The following code is acceptable:
.. code-block:: python
touca.check("prime numbers", 42)
touca.check("prime numbers", "forty three")
:raises RuntimeError:
if specified key is already associated with
a test result which was not iterable
:param key: name to be associated with the logged test result
:param value: element to be appended to the array
:see also: :py:meth:`~check`
"""
if key not in self._results:
self._results[key] = ResultEntry(typ=ResultCategory.Check, val=VectorType())
vec = self._results.get(key)
if vec.typ is not ResultCategory.Check or not isinstance(vec.val, VectorType):
raise RuntimeError("specified key has a different type")
vec.val.add(value)
def add_hit_count(self, key: str):
"""
Increments value of key every time it is executed.
creates the key with initial value of one if it does not exist.
Could be considered as a helper utility function.
This method is particularly helpful to track variables whose values
are determined in loops with indeterminate execution cycles:
.. code-block:: python
for number in numbers:
if is_prime(number):
touca.add_array_element("prime numbers", number)
touca.add_hit_count("number of primes")
This pattern can be considered as a syntactic sugar for the following
alternative:
.. code-block:: python
primes = []
for number in numbers:
if is_prime(number):
primes.append(number)
if primes:
touca.check("prime numbers", primes)
touca.check("number of primes", len(primes))
:raises RuntimeError:
if specified key is already associated with
a test result which was not an integer
:param key: name to be associated with the logged test result
:see also: :py:meth:`~check`
"""
if key not in self._results:
self._results[key] = ResultEntry(
typ=ResultCategory.Check, val=IntegerType(1)
)
return
value = self._results.get(key)
if value.typ is not ResultCategory.Check or not isinstance(
value.val, IntegerType
):
raise RuntimeError("specified key has a different type")
value.val._value += 1
def add_metric(self, key: str, milliseconds: int):
"""
Adds an already obtained measurements to the list of captured
performance benchmarks.
Useful for logging a metric that is measured without using this SDK.
:param key: name to be associated with this performance benchmark
:param milliseconds: duration of this measurement in milliseconds
"""
value = datetime.now()
self._tics[key] = value
self._tocs[key] = value + timedelta(microseconds=milliseconds * 1000)
    def start_timer(self, key: str):
        """
        Starts timing an event with the specified name.

        Measurement of the event is only complete when function
        :py:meth:`~stop_timer` is later called for the specified name.

        :param key: name to be associated with the performance metric
        """
        # Record the start timestamp; overwrites any previous start for
        # the same key.
        self._tics[key] = datetime.now()
def stop_timer(self, key: str):
"""
Stops timing an event with the specified name.
Expects function :py:meth:`~start_timer` to have been called previously
with the specified name.
:param key: name to be associated with the performance metric
"""
if key in self._tics:
self._tocs[key] = datetime.now()
def _metrics(self) -> Tuple[str, ToucaType]:
for key, tic in self._tics.items():
if key not in self._tocs:
continue
diff = (self._tocs.get(key) - tic).microseconds / 1000
yield key, IntegerType(int(diff))
def _metadata(self) -> Dict[str, str]:
return {
"teamslug": self._meta.get("team") or "unknown",
"testsuite": self._meta.get("suite") or "unknown",
"version": self._meta.get("version") or "unknown",
"testcase": self._meta.get("name") or "unknown",
"builtAt": datetime.now().isoformat(),
}
    def json(self):
        """
        Builds a JSON-friendly dict of everything captured for this case:
        metadata, checked results, assertions, and timer-derived metrics.
        Results and assertions are split by their ResultCategory.
        """
        return {
            "metadata": self._metadata(),
            "results": [
                {"key": k, "value": v.val.json()}
                for k, v in self._results.items()
                if v.typ is ResultCategory.Check
            ],
            "assertions": [
                {"key": k, "value": v.val.json()}
                for k, v in self._results.items()
                if v.typ is ResultCategory.Assert
            ],
            "metrics": [{"key": k, "value": v.json()} for k, v in self._metrics()],
        }
    def serialize(self) -> bytearray:
        """
        Serializes this case into Touca's FlatBuffers message format.

        FlatBuffers requires strings and vectors to be created *before*
        their enclosing table is started, and vectors to be filled in
        reverse order -- hence the prepend loops below.
        """
        from flatbuffers import Builder
        import touca._schema as schema
        # Maps the SDK-side result category to the schema enum value.
        dicts = {
            ResultCategory.Check: schema.ResultType.Check,
            ResultCategory.Assert: schema.ResultType.Assert,
        }
        builder = Builder(1024)
        # All metadata strings must exist in the buffer before MetadataStart.
        metadata = {k: builder.CreateString(v) for k, v in self._metadata().items()}
        schema.MetadataStart(builder)
        schema.MetadataAddTeamslug(builder, metadata.get("teamslug"))
        schema.MetadataAddTestsuite(builder, metadata.get("testsuite"))
        schema.MetadataAddVersion(builder, metadata.get("version"))
        schema.MetadataAddTestcase(builder, metadata.get("testcase"))
        schema.MetadataAddBuiltAt(builder, metadata.get("builtAt"))
        fbs_metadata = schema.MetadataEnd(builder)
        result_entries = []
        for k, v in self._results.items():
            # NOTE(review): called as an unbound method with `builder` as the
            # receiver -- equivalent to builder.CreateString(k).
            fbs_key = Builder.CreateString(builder, k)
            fbs_value = v.val.serialize(builder)
            schema.ResultStart(builder)
            schema.ResultAddKey(builder, fbs_key)
            schema.ResultAddValue(builder, fbs_value)
            schema.ResultAddTyp(builder, dicts.get(v.typ))
            result_entries.append(schema.ResultEnd(builder))
        # Vectors are built back-to-front in FlatBuffers.
        schema.ResultsStartEntriesVector(builder, len(result_entries))
        for item in reversed(result_entries):
            builder.PrependUOffsetTRelative(item)
        fbs_result_entries = builder.EndVector()
        schema.ResultsStart(builder)
        schema.ResultsAddEntries(builder, fbs_result_entries)
        fbs_results = schema.ResultsEnd(builder)
        metric_entries = []
        for k, v in self._metrics():
            fbs_key = Builder.CreateString(builder, k)
            fbs_value = v.serialize(builder)
            schema.MetricStart(builder)
            schema.MetricAddKey(builder, fbs_key)
            schema.MetricAddValue(builder, fbs_value)
            metric_entries.append(schema.MetricEnd(builder))
        schema.MetricsStartEntriesVector(builder, len(metric_entries))
        for item in reversed(metric_entries):
            builder.PrependUOffsetTRelative(item)
        fbs_metric_entries = builder.EndVector()
        schema.MetricsStart(builder)
        schema.MetricsAddEntries(builder, fbs_metric_entries)
        fbs_metrics = schema.MetricsEnd(builder)
        # Assemble the top-level Message table and finish the buffer.
        schema.MessageStart(builder)
        schema.MessageAddMetadata(builder, fbs_metadata)
        schema.MessageAddResults(builder, fbs_results)
        schema.MessageAddMetrics(builder, fbs_metrics)
        fbs_message = schema.MessageEnd(builder)
        builder.Finish(fbs_message)
        return builder.Output()
| 35.376238 | 88 | 0.612184 | 3.21875 |
198f09ec70a864f74973126efad673b62438636e | 2,138 | lua | Lua | gateway/src/apicast/loader.lua | SpillChek2/apicast | 55011497b6a2badd1ca1097c4d5221a842d80faf | [
"Apache-2.0"
] | null | null | null | gateway/src/apicast/loader.lua | SpillChek2/apicast | 55011497b6a2badd1ca1097c4d5221a842d80faf | [
"Apache-2.0"
] | null | null | null | gateway/src/apicast/loader.lua | SpillChek2/apicast | 55011497b6a2badd1ca1097c4d5221a842d80faf | [
"Apache-2.0"
] | 1 | 2018-04-09T08:45:39.000Z | 2018-04-09T08:45:39.000Z | --- APIcast source loader
-- Loading this module will add a new source code loaders to package.searchers.
-- The searcher is going to print deprecation warnings when apicast source is loaded
-- through old or non prefixed paths.
-- We can rename files and set up an alias here so we don't break customer's code and
-- print a deprecation warning.
-- Another searcher is going to look for policies with `.policy` suffix.
-- Policies can be packaged as `some_name/policy.lua` so the directory also contains the JSON spec.
local loadfile = loadfile
local sub = string.sub
local policy_loader = require 'apicast.policy_loader'
-- Renamed modules: maps an old require() name to its new location.
local map = {
  ['apicast'] = 'apicast.policy.apicast'
}
-- Resolve `name` on the search `path` and compile the matching file.
-- Returns the compiled (not yet executed) chunk, or nil plus an error
-- message from either the path search or the compilation step.
local function loader(name, path)
  local file, err = package.searchpath(name, path)
  if file then
    file, err = loadfile(file)
  end
  return file, err
end
--- Searcher has to return the loader or an error message.
-- Searcher for modules under the 'apicast.policy.' prefix: delegates to
-- policy_loader for builtin policies and wraps the loaded module in a
-- loader function, as package.searchers expects. Returns nothing when
-- the name does not carry the prefix or the policy cannot be loaded.
local function policy_searcher(name)
  if sub(name, 1, 15) == 'apicast.policy.' then
    local mod = policy_loader:pcall(sub(name, 16), 'builtin')
    if mod then return function () return mod end end
  end
end
-- Fallback loader for legacy, unprefixed requires: retries the lookup with
-- the 'apicast.' prefix (first as a plain file, then as a policy) and logs
-- a deprecation warning when that succeeds.
local function prefix_loader(name, path)
  local prefixed = 'apicast.' .. name
  local found, err = loader(prefixed, path)
  if not found then
    found = policy_searcher(prefixed)
  end
  if found then
    ngx.log(ngx.STDERR, 'DEPRECATION: when loading apicast code use correct prefix: require("', prefixed, '")')
  end
  return found or err
end
-- Loader for files that were renamed (see `map`): resolves the new name
-- (first as a policy, then as a plain file) and logs a deprecation warning
-- telling the caller which require() to use instead.
local function rename_loader(name, path)
  local new = map[name]
  local found, err = policy_searcher(new)
  if not found then
    found = loader(new, path)
  end
  if found then
    ngx.log(ngx.WARN, 'DEPRECATION: file renamed - change: require("', name, '")' ,' to: require("', new, '")')
  end
  return found or err
end
-- package.searchers entry point: only kicks in when the plain lookup on
-- package.path fails, then tries the rename map or the 'apicast.' prefix.
local function apicast_namespace(name)
  local path = package.path
  if not package.searchpath(name, path) then
    if map[name] then
      return rename_loader(name, path)
    else
      return prefix_loader(name, path)
    end
  end
end
table.insert(package.searchers, policy_searcher)
table.insert(package.searchers, apicast_namespace)
| 26.073171 | 111 | 0.717025 | 3.125 |
85cb68d2f275c7ab63531b3fa937fa3e509ed2a7 | 1,258 | c | C | sdk/openrtos/boot/fa626/lcd_clear.c | doyaGu/C0501Q_HWJL01 | 07a71328bd9038453cbb1cf9c276a3dd1e416d63 | [
"MIT"
] | 1 | 2021-10-09T08:05:50.000Z | 2021-10-09T08:05:50.000Z | sdk/openrtos/boot/fa626/lcd_clear.c | doyaGu/C0501Q_HWJL01 | 07a71328bd9038453cbb1cf9c276a3dd1e416d63 | [
"MIT"
] | null | null | null | sdk/openrtos/boot/fa626/lcd_clear.c | doyaGu/C0501Q_HWJL01 | 07a71328bd9038453cbb1cf9c276a3dd1e416d63 | [
"MIT"
] | null | null | null | #include "ite/ith.h"
#define REMAP_ADDR 0x80000000
// _start is default function name of entry point.
/*
 * Boot-time framebuffer clear: fills LCD plane A with the configured
 * background color. Pixel format (16- or 32-bit) is selected at compile
 * time via CFG_LCD_BPP. Runs bare-metal as the image entry point.
 */
void _start(void)
{
    uint32_t* ptr;
    uint32_t size;
    uint32_t color, i;
    asm volatile("mcr p15, 0, %0, c7, c14, 0" : : "r"(0)); // clean and invalidate D-Cache all
    asm volatile("mcr p15, 0, %0, c7, c5, 0" : : "r"(0)); // invalidate I-Cache all
    /* Framebuffer base, shifted into the remapped (cached/uncached) window. */
    ptr = (uint32_t*)(ithLcdGetBaseAddrA() + REMAP_ADDR);
    size = ithLcdGetPitch() * ithLcdGetHeight();
#if CFG_LCD_BPP == 2
    /* RGB565: pack the 24-bit boot color, then duplicate it so one
     * 32-bit store writes two pixels. */
    color = ITH_RGB565((CFG_LCD_BOOT_BGCOLOR >> 16) & 0xFF, (CFG_LCD_BOOT_BGCOLOR >> 8) & 0xFF, CFG_LCD_BOOT_BGCOLOR & 0xFF);
    color |= color << 16;
#elif CFG_LCD_BPP == 4
    color = CFG_LCD_BOOT_BGCOLOR;
#elif CFG_LCD_BPP == 0
#error "0 LCD BPP"
#else
#error "Unknown LCD BPP"
#endif
    /* Unrolled x8: each iteration stores 32 bytes. */
    for (i = 0; i < size / (sizeof(uint32_t)*8); i++)
    {
        *ptr++ = color;
        *ptr++ = color;
        *ptr++ = color;
        *ptr++ = color;
        *ptr++ = color;
        *ptr++ = color;
        *ptr++ = color;
        *ptr++ = color;
        // FIXME: workaround for IT9850
#if (CFG_CHIP_FAMILY == 9850)
        {
            asm volatile("mcr p15, 0, %0, c7, c10, 4" : : "r"(0)); // sync (drain write buffer)
        }
#endif
    }
}
| 26.208333 | 125 | 0.556439 | 3.078125 |
0cc14f945ff11b1ec78d14d582d03623e82355fd | 4,657 | py | Python | tools/multiscale_shape.py | marvin-eisenberger/hamiltonian-interpolation | d18c2f401feffc672998c5fa1d50c1de03dba902 | [
"MIT"
] | 5 | 2021-01-05T23:16:55.000Z | 2021-07-23T12:26:06.000Z | tools/multiscale_shape.py | marvin-eisenberger/hamiltonian-interpolation | d18c2f401feffc672998c5fa1d50c1de03dba902 | [
"MIT"
] | null | null | null | tools/multiscale_shape.py | marvin-eisenberger/hamiltonian-interpolation | d18c2f401feffc672998c5fa1d50c1de03dba902 | [
"MIT"
] | 1 | 2021-02-22T08:31:05.000Z | 2021-02-22T08:31:05.000Z | import torch
from shape_utils import Shape, load_shape_pair, scatter_shape_pair
from torch_geometric.nn import knn
from param import *
from arap_potential import arap_vert
def load_multiscale_shapes(folder_path, file_name, scales, offset=None):
    """Like 'load_shape_pair' but for shapes with different resolutions.

    :param folder_path: directory containing one 'sub_<scale>' folder per scale
    :param file_name: shape-pair file name inside each scale folder
    :param scales: list of scale identifiers (used in the folder names)
    :param offset: translation applied by load_shape_pair; defaults to
        0.5 in every coordinate. Previously the default tensor was built
        at import time (binding `device` too early and sharing one tensor
        across all calls); it is now created per call.
    :return: (shape_x, shape_y) MultiscaleShape pair
    """
    if offset is None:
        offset = 0.5 * torch.ones([3], device=device, dtype=torch.float32)
    vert_x_array = []
    triv_x_array = []
    vert_y_array = []
    triv_y_array = []
    for i_scale in range(len(scales)):
        file_load = folder_path + "sub_" + str(scales[i_scale]) + "/" + file_name
        shape_x, shape_y = load_shape_pair(file_load, offset)
        vert_x_array.append(shape_x.vert)
        vert_y_array.append(shape_y.vert)
        triv_x_array.append(shape_x.triv)
        triv_y_array.append(shape_y.triv)
    shape_x = MultiscaleShape(vert_x_array, triv_x_array)
    shape_y = MultiscaleShape(vert_y_array, triv_y_array)
    return shape_x, shape_y
class MultiscaleShape(Shape):
    """Class for shapes with multiple resolutions.
    Attributes beyond the base class 'Shape' are:
    vert_array: List of vertices with different resolutions
    triv_array: List of triangles with different resolutions
    scale_idx: The index describing the current resolution --
    The current vertices are vert_array[scale_idx]
    ass_[array/vecs/weights]: attributes needed to apply an interpolation
    on scale 'scale_idx' to the next resolution '(scale_idx+1)'
    """
    def __init__(self, vert_array, triv_array):
        # Start at the coarsest resolution (index 0).
        super().__init__(vert_array[0], triv_array[0])
        self.vert_array = vert_array
        self.triv_array = triv_array
        self.scale_idx = 0
        self.scale_idx_len = len(vert_array)
        self.ass_array = None
        self.ass_vecs = None
        self.ass_weights = None
        self.init_upscale()
    def set_scale_idx(self, scale_idx):
        # Switch the active resolution, saving the (possibly deformed)
        # current vertices back into vert_array first.
        assert scale_idx >= 0 and scale_idx < self.scale_idx_len, "new index out of bounds"
        self.vert_array[self.scale_idx] = self.vert
        self.scale_idx = scale_idx
        self.vert = self.vert_array[scale_idx]
        self.triv = self.triv_array[scale_idx]
        self.samples = list(range(self.vert.shape[0]))
        # Invalidate cached neighborhood structure; recomputed lazily.
        self.neigh = None
    def increase_scale_idx(self):
        # Move to the next finer resolution.
        self.set_scale_idx(self.scale_idx+1)
    def next_resolution(self):
        # Shape (n_verts, 3) of the next finer resolution.
        return self.vert_array[self.scale_idx+1].shape
    def init_upscale(self, num_knn=3):
        # Precompute, for every consecutive scale pair, the k nearest coarse
        # vertices of each fine vertex plus offset vectors and inverse-distance
        # weights used by apply_upsampling.
        self.ass_array = []
        self.ass_vecs = []
        self.ass_weights = []
        for idx in range(self.scale_idx_len-1):
            # knn runs on CPU; results are moved back to `device`.
            vert_i = self.vert_array[idx].to(device_cpu)
            vert_ip1 = self.vert_array[idx+1].to(device_cpu)
            ass_curr = knn(vert_i, vert_ip1, num_knn)
            ass_curr = ass_curr[1, :].view(-1, num_knn)
            self.ass_array.append(ass_curr.to(device)) #[n_vert_tp1, num_knn]
            vec_curr = vert_ip1.unsqueeze(1) - vert_i[ass_curr, :]
            self.ass_vecs.append(vec_curr.to(device)) #[n_vert_tp1, num_knn, 3]
            # Inverse-distance weights, normalized over the k neighbors;
            # the 1e-5 guards against division by zero for coincident points.
            weights_curr = 1/(torch.norm(vec_curr, dim=2, keepdim=True)+1e-5)
            weights_curr = weights_curr / torch.sum(weights_curr, dim=1, keepdim=True)
            self.ass_weights.append(weights_curr.to(device)) #[n_vert_tp1, num_knn, 1]
    def apply_upsampling(self, vert_t):
        # Transfer a deformation of the current scale onto the next finer
        # scale: rotate the precomputed offsets by per-vertex rotations
        # (arap_vert) and blend with the precomputed weights.
        R = arap_vert(vert_t, self.vert, self.get_neigh()) #[n_vert_tp1, 3, 3]
        ass_curr = self.ass_array[self.scale_idx]
        vec_curr = self.ass_vecs[self.scale_idx]
        weights_curr = self.ass_weights[self.scale_idx]
        vert_tp1 = vert_t[ass_curr, :] + torch.matmul(R[ass_curr], vec_curr.unsqueeze(3)).squeeze() #[n_vert_tp1, num_knn, 3]
        vert_tp1 = torch.sum(weights_curr * vert_tp1, dim=1)
        return vert_tp1
    def rotate(self, R):
        # Rotate every resolution by R and rebuild the upscaling data.
        for i in range(self.scale_idx_len):
            self.vert_array[i] = torch.mm(self.vert_array[i], R.transpose(0, 1))
        self.vert = self.vert_array[self.scale_idx]
        self.init_upscale()
    def to_box(self, shape_y):
        # Normalize this shape and shape_y into a common box at every
        # resolution, then restore the previously active scale.
        scale_idx = self.scale_idx
        for i in range(self.scale_idx_len):
            self.set_scale_idx(i)
            shape_y.set_scale_idx(i)
            super().to_box(shape_y)
        self.set_scale_idx(scale_idx)
        shape_y.set_scale_idx(scale_idx)
        self.init_upscale()
    def scale(self, factor, shift=True):
        # Uniformly scale every resolution, then restore the active scale.
        scale_idx = self.scale_idx
        for i in range(self.scale_idx_len):
            self.set_scale_idx(i)
            super().scale(factor, shift)
        self.set_scale_idx(scale_idx)
        self.init_upscale()
if __name__ == "__main__":
print("main of multiscale_shape.py")
| 33.503597 | 126 | 0.665235 | 3.34375 |
f05bdaed59cf5073cab62db01710a16ba5ff7771 | 7,597 | py | Python | app/views.py | PaulMurrayCbr/GameNight | 838c19dda765027abbe8e12e331268b01cb859c2 | [
"Unlicense"
] | null | null | null | app/views.py | PaulMurrayCbr/GameNight | 838c19dda765027abbe8e12e331268b01cb859c2 | [
"Unlicense"
] | null | null | null | app/views.py | PaulMurrayCbr/GameNight | 838c19dda765027abbe8e12e331268b01cb859c2 | [
"Unlicense"
] | null | null | null | from app import app, db
from flask import render_template, flash, redirect, get_flashed_messages
import forms
import models
import Character
from flask.globals import request
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
def menugear() :
    """Context shared by every page: the list of all PCs for the nav menu."""
    return {
        'pcs': models.Character.query.all()
    }
@app.route('/')
@app.route('/index')
def index():
    """Render the landing page."""
    return render_template('index.html', menu=menugear())
@app.route('/whiteboard')
def whiteboard():
    """Render the shared whiteboard page."""
    return render_template('whiteboard.html', menu=menugear())
@app.route('/pc/<name>/')
def character(name):
    """Show a PC's detail page, looked up by (unique) character name.

    Reads 'viewstate' flashes to decide whether the HP breakdown panel
    should be rendered open. Redirects to '/' with a flashed message when
    the name is missing or ambiguous.
    """
    try:
        pc = models.Character.query.filter_by(name=name).one()
        updatepc_form=forms.PC(obj=pc)
        newhp_form=forms.HP()
        openhpbreakdown = False
        states = get_flashed_messages(category_filter=['viewstate'])
        if states:
            for state in states:
                if state['hpopen']:
                    openhpbreakdown = True
        return render_template('pc.html',
                               updatepc_form=updatepc_form,
                               newhp_form = newhp_form,
                               pc=pc,
                               pcinfo=Character.buildInfo(pc),
                               menu=menugear(),
                               openhpbreakdown = openhpbreakdown)
    except MultipleResultsFound, e:
        flash(('Found multiple characters named %s' % name, 'danger'), 'msg')
        pc = None
    except NoResultFound, e:
        flash(('PC %s not found' % name, 'warning'), 'msg')
        pc = None
    return redirect('/')
@app.route('/pc/<name>/update.do', methods=['POST'])
def do_updatepc(name):
    """Update a PC's abbrev/name/pname from the posted PC form."""
    try:
        pc = models.Character.query.filter_by(name=name).one()
        # Form values are bound from the POST body by WTForms.
        updatepc_form=forms.PC(obj=pc)
        pc.abbrev = updatepc_form.abbrev.data
        pc.name = updatepc_form.name.data
        pc.pname = updatepc_form.pname.data
        db.session.commit()
        # Redirect to the (possibly renamed) PC page.
        return redirect('/pc/%s' % pc.name)
    except MultipleResultsFound, e:
        flash(('Found multiple characters named %s' % name, 'danger'), 'msg')
        pc = None
    except NoResultFound, e:
        flash(('PC %s not found' % name, 'warning'), 'msg')
        pc = None
    return redirect('/')
@app.route('/pc/<name>/addhptype.do', methods=['POST'])
def do_addhptypepc(name):
    """Create a new HP pool for the named PC from the posted HP form.

    The pool starts full (current == max). Flashes a viewstate so the HP
    breakdown panel is open after the redirect.
    """
    try:
        pc = models.Character.query.filter_by(name=name).one()
        newhp_form=forms.HP(obj=pc)
        hp = models.Hp(
            character_id = pc.id,
            source = newhp_form.source.data,
            max = newhp_form.max.data,
            current = newhp_form.max.data,
            ablative_only = newhp_form.ablative_only.data
        )
        db.session.add(hp)
        db.session.commit()
        flash({'hpopen':True}, 'viewstate')
        return redirect('/pc/%s' % pc.name)
    except MultipleResultsFound, e:
        flash(('Found multiple characters named %s' % name, 'danger'), 'msg')
        pc = None
    except NoResultFound, e:
        flash(('PC %s not found' % name, 'warning'), 'msg')
        pc = None
    return redirect('/')
@app.route('/pc/<name>/hp/<id>/set.do', methods=['GET', 'POST'])
def do_sethppc(name, id):
try:
pc = models.Character.query.filter_by(name=name).one()
hp = models.Hp.query.get(id)
if not hp:
flash(("HP %s not found" % id , 'danger'), 'msg')
elif hp.character_id != pc.id:
flash(("HP %s belongs to %s" % (id, hp.character.name) , 'danger'), 'msg')
else:
v = request.args.get('v', '')
if not v or v == '':
flash(("no new value specified" , 'warning'), 'msg')
else:
try:
v = int(v)
except ValueError, e:
flash(("'%s' does not appear to be a number" % v, 'warning'), 'msg')
hp.current = v
db.session.commit()
flash(("Set current to %d" % v , 'success'), 'msg')
flash({'hpopen':True}, 'viewstate')
return redirect('/pc/%s' % pc.name)
except MultipleResultsFound, e:
flash(('Found multiple characters named %s' % name, 'danger'), 'msg')
pc = None
except NoResultFound, e:
flash(('PC %s not found' % name, 'warning'), 'msg')
pc = None
return redirect('/')
@app.route('/pc/<name>/hp/<id>/max.do', methods=['GET', 'POST'])
def do_maxhppc(name, id):
try:
pc = models.Character.query.filter_by(name=name).one()
hp = models.Hp.query.get(id)
if not hp:
flash(("HP %s not found" % id , 'danger'), 'msg')
elif hp.character_id != pc.id:
flash(("HP %s belongs to %s" % (id, hp.character.name) , 'danger'), 'msg')
else:
v = request.args.get('v', '')
if not v or v == '':
flash(("no new value specified" , 'warning'), 'msg')
else:
try:
v = int(v)
except ValueError, e:
flash(("'%s' does not appear to be a number" % v, 'warning'), 'msg')
hp.max = v
db.session.commit()
flash(("Set max to %d" % v , 'success'), 'msg')
flash({'hpopen':True}, 'viewstate')
return redirect('/pc/%s' % pc.name)
except MultipleResultsFound, e:
flash(('Found multiple characters named %s' % name, 'danger'), 'msg')
pc = None
except NoResultFound, e:
flash(('PC %s not found' % name, 'warning'), 'msg')
pc = None
return redirect('/')
@app.route('/pc/<name>/hp/<id>/add.do', methods=['GET', 'POST'])
def do_addhppc(name, id):
try:
pc = models.Character.query.filter_by(name=name).one()
hp = models.Hp.query.get(id)
if not hp:
flash(("HP %s not found" % id , 'danger'), 'msg')
elif hp.character_id != pc.id:
flash(("HP %s belongs to %s" % (id, hp.character.name) , 'danger'), 'msg')
else:
v = request.args.get('v', '')
if not v or v == '':
flash(("no new value specified" , 'warning'), 'msg')
else:
try:
v = int(v)
except ValueError, e:
flash(("'%s' does not appear to be a number" % v, 'warning'), 'msg')
hp.current += v
db.session.commit()
if v < 0:
flash(("Subtracted %d" % -v , 'success'), 'msg')
else:
flash(("Added %d" % v , 'success'), 'msg')
flash({'hpopen':True}, 'viewstate')
return redirect('/pc/%s' % pc.name)
except MultipleResultsFound, e:
flash(('Found multiple characters named %s' % name, 'danger'), 'msg')
pc = None
except NoResultFound, e:
flash(('PC %s not found' % name, 'warning'), 'msg')
pc = None
return redirect('/')
@app.route('/pc/<name>/hp/<id>/zap.do', methods=['GET', 'POST'])
def do_zaphppc(name, id):
    """Delete an HP pool after checking it exists and belongs to the PC."""
    try:
        pc = models.Character.query.filter_by(name=name).one()
        hp = models.Hp.query.get(id)
        if not hp:
            flash(("HP %s not found" % id , 'danger'), 'msg')
        elif hp.character_id != pc.id:
            flash(("HP %s belongs to %s" % (id, hp.character.name) , 'danger'), 'msg')
        else:
            db.session.delete(hp)
            db.session.commit()
            flash(("Deleted" , 'success'), 'msg')
        flash({'hpopen':True}, 'viewstate')
        return redirect('/pc/%s' % pc.name)
    except MultipleResultsFound, e:
        flash(('Found multiple characters named %s' % name, 'danger'), 'msg')
        pc = None
    except NoResultFound, e:
        flash(('PC %s not found' % name, 'warning'), 'msg')
        pc = None
    return redirect('/')
@app.route('/admin/pc/')
def adminpc():
    """Admin page: list all PCs with a blank form for creating a new one."""
    pcs = models.Character.query.all()
    return render_template('/admin/pcs.html',
                           pcs=pcs,
                           newpc_form=forms.PC(),
                           menu=menugear())
@app.route('/admin/pc/newpc.do', methods=['POST'])
def do_newpc():
    """Create a new PC from the posted PC form and return to the admin list."""
    form = forms.PC(request.form)
    pc = models.Character(name=form.name.data, pname=form.pname.data, abbrev=form.abbrev.data)
    db.session.add(pc)
    db.session.commit()
    flash(("New PC", 'success'), 'msg')
    return redirect('/admin/pc/')
@app.route('/admin/pc/<id>/delete.do', methods=['GET'])
def do_deletepc(id):
    """Delete the PC with the given primary key, flashing the outcome."""
    pc = models.Character.query.get(id)
    if not pc:
        flash(("PC %s not found" % id , 'danger'), 'msg')
    else :
        db.session.delete(pc)
        db.session.commit()
        flash(("PC '%s' deleted" % pc.name , 'success'), 'msg')
    return redirect('/admin/pc/')
| 26.939716 | 94 | 0.612874 | 3.03125 |
e73fffbbf34519bd85db2a58a307b41246e8a610 | 1,101 | js | JavaScript | src/0648.replace-words.648/0648.replace-words.648.js | jiangshanmeta/meta | 8f9d084cda91988d42208ac7a029612e9edc693b | [
"MIT"
] | 221 | 2018-10-26T07:05:12.000Z | 2022-03-30T03:23:10.000Z | src/0648.replace-words.648/0648.replace-words.648.js | ralap18/meta | 82d660a6eabb15e398a7dcc2a0fa99342143bb12 | [
"MIT"
] | 23 | 2018-09-24T14:50:58.000Z | 2020-09-17T14:23:45.000Z | src/0648.replace-words.648/0648.replace-words.648.js | ralap18/meta | 82d660a6eabb15e398a7dcc2a0fa99342143bb12 | [
"MIT"
] | 45 | 2019-03-29T03:36:19.000Z | 2022-03-25T20:57:13.000Z | /**
* @param {string[]} dict
* @param {string} sentence
* @return {string}
*/
// 前缀树
// Walks the prefix tree character by character. Returns the stored root
// that `string` starts with (a `null` node marks the end of a stored
// word), or '' when no stored word is a prefix of `string`.
function findRoot (trie, string) {
    let node = trie;
    const prefix = [];
    let i = 0;
    while (i < string.length && node) {
        prefix.push(string[i]);
        node = node[string[i]];
        i++;
    }
    return node === null ? prefix.join('') : '';
}
var replaceWords = function (dict, sentence) {
    // Local copy of the prefix lookup so this block is self-contained:
    // returns the stored root `word` starts with, or '' (a null node
    // marks the end of a stored word).
    const lookup = function (node, word) {
        const prefix = [];
        let i = 0;
        while (i < word.length && node) {
            prefix.push(word[i]);
            node = node[word[i]];
            i++;
        }
        return node === null ? prefix.join('') : '';
    };
    const trie = {};
    // Shortest roots first: when several roots match, the shortest wins,
    // and longer words extending an existing root are never inserted.
    dict.sort((a, b) => a.length - b.length);
    for (const word of dict) {
        if (lookup(trie, word) === '') {
            let node = trie;
            for (let j = 0; j < word.length - 1; j++) {
                node = node[word[j]] || (node[word[j]] = {});
            }
            // Terminate with null so lookup can detect word ends.
            node[word[word.length - 1]] = null;
        }
    }
    return sentence.split(' ').map((token) => {
        const root = lookup(trie, token);
        return root === '' ? token : root;
    }).join(' ');
};
7a97019e955f84d6270a6a63776af848923fd024 | 2,666 | rs | Rust | engine/src/math/vector4.rs | monadgroup/re19 | 80989ebf8ae2a3e203a443e583a7f359f0114333 | [
"Apache-2.0"
] | 47 | 2021-10-04T13:51:31.000Z | 2022-03-27T17:23:50.000Z | engine/src/math/vector4.rs | monadgroup/re19 | 80989ebf8ae2a3e203a443e583a7f359f0114333 | [
"Apache-2.0"
] | null | null | null | engine/src/math/vector4.rs | monadgroup/re19 | 80989ebf8ae2a3e203a443e583a7f359f0114333 | [
"Apache-2.0"
] | null | null | null | use super::{Float, Vector3};
use core::ops;
/// A 4-component float vector (x, y, z, w), `repr(C)` so it can be passed
/// to native/graphics APIs with a guaranteed field layout.
#[derive(Clone, Copy, PartialEq, PartialOrd, Default, Debug)]
#[repr(C)]
pub struct Vector4 {
    pub x: f32,
    pub y: f32,
    pub z: f32,
    pub w: f32,
}
impl Vector4 {
    /// Returns a copy with the x component replaced.
    pub fn with_x(self, x: f32) -> Self {
        Vector4 { x, ..self }
    }
    /// Returns a copy with the y component replaced.
    pub fn with_y(self, y: f32) -> Self {
        Vector4 { y, ..self }
    }
    /// Returns a copy with the z component replaced.
    pub fn with_z(self, z: f32) -> Self {
        Vector4 { z, ..self }
    }
    /// Returns a copy with the w component replaced.
    pub fn with_w(self, w: f32) -> Self {
        Vector4 { w, ..self }
    }
    /// 4D dot product (includes the w component).
    pub fn dot(self, other: Vector4) -> f32 {
        self.x * other.x + self.y * other.y + self.z * other.z + self.w * other.w
    }
    /// Squared Euclidean length; avoids the sqrt of `length`.
    pub fn length_squared(self) -> f32 {
        self.dot(self)
    }
    /// Euclidean length. `sqrt` comes from the crate's `Float` trait.
    pub fn length(self) -> f32 {
        self.length_squared().sqrt()
    }
    /// Unit-length copy; divides component-wise via the macro-generated
    /// `Div` impl. NOTE(review): no guard for zero-length input.
    pub fn unit(self) -> Vector4 {
        self / Vector4::from(self.length())
    }
    /// Linear interpolation: `self` at t == 0, `other` at t == 1.
    pub fn lerp(self, other: Vector4, t: f32) -> Vector4 {
        self + (other - self) * t
    }
    /// Perspective divide: (x/w, y/w, z/w) as a Vector3.
    pub fn unproject(self) -> Vector3 {
        Vector3 {
            x: self.x / self.w,
            y: self.y / self.w,
            z: self.z / self.w,
        }
    }
    /// Drops the w component without dividing.
    pub fn as_vec3(self) -> Vector3 {
        Vector3 {
            x: self.x,
            y: self.y,
            z: self.z,
        }
    }
    /// Component-wise floor.
    pub fn floor(self) -> Vector4 {
        Vector4 {
            x: self.x.floor(),
            y: self.y.floor(),
            z: self.z.floor(),
            w: self.w.floor(),
        }
    }
    /// Component-wise fractional part.
    pub fn fract(self) -> Vector4 {
        Vector4 {
            x: self.x.fract(),
            y: self.y.fract(),
            z: self.z.fract(),
            w: self.w.fract(),
        }
    }
}
define_vec!(Vector4 => (x, y, z, w));
/// Component-wise negation.
impl ops::Neg for Vector4 {
    type Output = Vector4;
    fn neg(self) -> Vector4 {
        Vector4 {
            x: -self.x,
            y: -self.y,
            z: -self.z,
            w: -self.w,
        }
    }
}
/// Conversions to and from arrays, tuples, and a scalar splat.
impl From<[f32; 4]> for Vector4 {
    fn from([x, y, z, w]: [f32; 4]) -> Self {
        Vector4 { x, y, z, w }
    }
}
impl From<(f32, f32, f32, f32)> for Vector4 {
    fn from((x, y, z, w): (f32, f32, f32, f32)) -> Self {
        Vector4 { x, y, z, w }
    }
}
/// Splat: fills all four components with the same value.
impl From<f32> for Vector4 {
    fn from(val: f32) -> Self {
        Vector4 {
            x: val,
            y: val,
            z: val,
            w: val,
        }
    }
}
impl Into<[f32; 4]> for Vector4 {
    fn into(self) -> [f32; 4] {
        [self.x, self.y, self.z, self.w]
    }
}
impl Into<(f32, f32, f32, f32)> for Vector4 {
    fn into(self) -> (f32, f32, f32, f32) {
        (self.x, self.y, self.z, self.w)
    }
}
| 19.895522 | 81 | 0.445236 | 3.078125 |
7541b62a34467e2119df5125dde81063db36ce24 | 2,467 | rs | Rust | core/src/eval/arithmetic.rs | contractshark/rust-cevm | 35cdefb760d41197ccfadc8c446343f20eba9080 | [
"Apache-2.0"
] | 47 | 2020-08-01T19:50:19.000Z | 2022-03-29T16:23:40.000Z | core/src/eval/arithmetic.rs | gakonst/rust-cevm | 35cdefb760d41197ccfadc8c446343f20eba9080 | [
"Apache-2.0"
] | null | null | null | core/src/eval/arithmetic.rs | gakonst/rust-cevm | 35cdefb760d41197ccfadc8c446343f20eba9080 | [
"Apache-2.0"
] | 4 | 2020-12-30T06:43:30.000Z | 2021-09-08T11:41:14.000Z | use crate::utils::I256;
use core::convert::TryInto;
use core::ops::Rem;
use primitive_types::{U256, U512};
/// EVM-style unsigned division: `op1 / op2`, with division by zero
/// defined to return zero instead of panicking.
pub fn div(op1: U256, op2: U256) -> U256 {
    if op2 == U256::zero() {
        return U256::zero();
    }
    op1 / op2
}
/// EVM SDIV: signed division via two's-complement reinterpretation as I256.
/// NOTE(review): zero-divisor and MIN/-1 handling presumably live inside
/// I256's `Div` impl -- confirm against utils::I256.
pub fn sdiv(op1: U256, op2: U256) -> U256 {
    let op1: I256 = op1.into();
    let op2: I256 = op2.into();
    let ret = op1 / op2;
    ret.into()
}
/// EVM MOD: unsigned modulo, with `x mod 0` defined as zero.
pub fn rem(op1: U256, op2: U256) -> U256 {
    if op2 == U256::zero() {
        return U256::zero();
    }
    op1 % op2
}
/// EVM SMOD: signed remainder via I256, with `x smod 0` defined as zero.
/// The result takes the sign of the dividend (per I256's Rem impl).
pub fn srem(op1: U256, op2: U256) -> U256 {
    if op2 == U256::zero() {
        U256::zero()
    } else {
        let op1: I256 = op1.into();
        let op2: I256 = op2.into();
        let ret = op1.rem(op2);
        ret.into()
    }
}
/// EVM ADDMOD: (op1 + op2) % op3 computed in 512 bits so the intermediate
/// sum cannot overflow; `mod 0` is defined as zero.
pub fn addmod(op1: U256, op2: U256, op3: U256) -> U256 {
    let op1: U512 = op1.into();
    let op2: U512 = op2.into();
    let op3: U512 = op3.into();
    if op3 == U512::zero() {
        U256::zero()
    } else {
        // The remainder is < op3 <= U256::MAX, so the narrowing conversion
        // cannot fail.
        let v = (op1 + op2) % op3;
        v.try_into()
            .expect("op3 is less than U256::max_value(), thus it never overflows; qed")
    }
}
/// EVM MULMOD: (op1 * op2) % op3 computed in 512 bits so the intermediate
/// product cannot overflow; `mod 0` is defined as zero.
pub fn mulmod(op1: U256, op2: U256, op3: U256) -> U256 {
    let op1: U512 = op1.into();
    let op2: U512 = op2.into();
    let op3: U512 = op3.into();
    if op3 == U512::zero() {
        U256::zero()
    } else {
        // The remainder is < op3 <= U256::MAX, so narrowing cannot fail.
        let v = (op1 * op2) % op3;
        v.try_into()
            .expect("op3 is less than U256::max_value(), thus it never overflows; qed")
    }
}
/// EVM EXP: exponentiation by squaring with wrapping (mod 2^256)
/// multiplication. By construction `x^0 == 1` for every x, including 0.
pub fn exp(op1: U256, op2: U256) -> U256 {
    let mut base = op1;
    let mut exponent = op2;
    let mut acc: U256 = 1.into();
    while exponent != 0.into() {
        // Multiply the accumulator in whenever the low bit is set.
        if exponent & 1.into() != 0.into() {
            acc = acc.overflowing_mul(base).0;
        }
        exponent >>= 1;
        base = base.overflowing_mul(base).0;
    }
    acc
}
/// EVM SIGNEXTEND: extends the sign bit of the low (op1 + 1) bytes of op2
/// across the full 256 bits. For op1 > 32 the value is returned unchanged.
pub fn signextend(op1: U256, op2: U256) -> U256 {
    if op1 > U256::from(32) {
        op2
    } else {
        let mut ret = U256::zero();
        let len: usize = op1.as_usize();
        // t is the bit index of the sign bit of the (len + 1)-byte value.
        let t: usize = 8 * (len + 1) - 1;
        let t_bit_mask = U256::one() << t;
        let t_value = (op2 & t_bit_mask) >> t;
        // Rebuild the result bit by bit: keep bits at or below the sign
        // bit, replicate the sign bit above it.
        for i in 0..256 {
            let bit_mask = U256::one() << i;
            let i_value = (op2 & bit_mask) >> i;
            if i <= t {
                ret = ret.overflowing_add(i_value << i).0;
            } else {
                ret = ret.overflowing_add(t_value << i).0;
            }
        }
        ret
    }
}
| 23.495238 | 87 | 0.478719 | 3.21875 |
7f1887ad8207d20aadee5914b8aa3df9a823555d | 5,156 | go | Go | request/queue.go | KillianMeersman/wander | e8fab5fd01aad4be4cf835a6c6447e2491a51bc1 | [
"MIT"
] | null | null | null | request/queue.go | KillianMeersman/wander | e8fab5fd01aad4be4cf835a6c6447e2491a51bc1 | [
"MIT"
] | null | null | null | request/queue.go | KillianMeersman/wander | e8fab5fd01aad4be4cf835a6c6447e2491a51bc1 | [
"MIT"
] | null | null | null | package request
import (
"fmt"
"io"
"sync"
)
// QueueResult is what Dequeue delivers: either a dequeued request or an
// error describing why none could be produced.
type QueueResult struct {
	Error   error
	Request *Request
}
// Queue is a prioritized FIFO queue for requests
type Queue interface {
	io.Closer
	// Enqueue adds the request to the queue, returns an error if no more space is available.
	Enqueue(req *Request, priority int) error
	// Dequeue pops the highest priority request from the queue.
	Dequeue() <-chan QueueResult
	// Count returns the amount of queued requests.
	Count() (int, error)
	// Clear removes all queued requests, leaving the queue empty but usable.
	Clear()
}
// QueueMaxSize signals the Queue has reached its maximum size.
type QueueMaxSize struct {
	size int
}

// Error implements the error interface.
func (r QueueMaxSize) Error() string {
	return fmt.Sprintf("Request queue has reached maximum size of %d", r.size)
}
// heapNode is one entry in the heap's backing array: the request plus its
// priority and a monotonically increasing insertion counter used to keep
// FIFO order among equal priorities.
type heapNode struct {
	priority       int
	insertionCount int
	request        *Request
}
// less reports whether a should sit below b in the max-heap: lower priority
// loses, and among equal priorities the *later* insertion (larger
// insertionCount) loses, which makes ties dequeue in FIFO order.
func less(a, b heapNode) bool {
	if a.priority < b.priority {
		return true
	}
	if a.priority == b.priority {
		if a.insertionCount > b.insertionCount {
			return true
		}
	}
	return false
}
// RequestHeapQueue is a heap implementation for request.Queue.
// data is index-addressed (count tracks the logical size; len(data) acts
// as capacity). waitCondition shares lock and lets Dequeue block until an
// item arrives or Close is called; waitGroup tracks in-flight Dequeues.
type RequestHeapQueue struct {
	data           []heapNode
	count          int
	maxSize        int
	insertionCount int

	lock          *sync.Mutex
	waitCondition *sync.Cond
	waitGroup     *sync.WaitGroup
	isDone        bool
}
// NewRequestHeap returns a request heap (priority queue).
// The backing array starts at a tenth of maxSize and is grown on demand
// by insert.
func NewRequestHeap(maxSize int) *RequestHeapQueue {
	lock := &sync.Mutex{}
	heap := &RequestHeapQueue{
		data:          make([]heapNode, maxSize/10),
		maxSize:       maxSize,
		lock:          lock,
		waitCondition: sync.NewCond(lock),
		waitGroup:     &sync.WaitGroup{},
		isDone:        false,
	}
	return heap
}
// BuildHeap builds a request heap from existing data.
// Bug fix: the supplied nodes were previously discarded -- the function
// heapified the fresh (empty) backing array and left count at zero, so
// the returned queue never contained the given data. The nodes are now
// adopted as the backing array and sifted into max-heap order.
func BuildHeap(data []heapNode, maxSize int) *RequestHeapQueue {
	heap := NewRequestHeap(maxSize)
	heap.data = data
	heap.count = len(data)
	// Keep the insertion counter ahead of the adopted nodes so later
	// Enqueue calls preserve FIFO order on priority ties.
	heap.insertionCount = len(data)
	for i := len(data) / 2; i >= 0; i-- {
		heap.maxHeapify(i)
	}
	return heap
}
// Enqueue a request with the given priority.
// Takes the queue lock and delegates to insert, which grows the backing
// array as needed and signals one blocked Dequeue.
func (r *RequestHeapQueue) Enqueue(req *Request, priority int) error {
	r.lock.Lock()
	defer r.lock.Unlock()
	return r.insert(req, priority)
}
// Dequeue returns a channel that will deliver the highest-priority request
// once one is available. The goroutine blocks on the condition variable
// until an item is queued or Close is called; after Close the channel
// never receives a value. The send is unbuffered, so the caller must
// receive from the returned channel.
func (r *RequestHeapQueue) Dequeue() <-chan QueueResult {
	outlet := make(chan QueueResult)
	go func() {
		r.waitGroup.Add(1)
		r.waitCondition.L.Lock()
		// wait until an item is available or Close is called
		for r.count < 1 && !r.isDone {
			r.waitCondition.Wait()
		}
		if r.isDone {
			r.waitCondition.L.Unlock()
		} else {
			req := r.extract()
			r.waitCondition.L.Unlock()
			outlet <- QueueResult{
				Request: req,
			}
		}
		r.waitGroup.Done()
	}()
	return outlet
}
// Close wakes every blocked Dequeue goroutine and waits for them to
// finish; pending Dequeue channels never receive a value. Always returns
// nil. NOTE(review): isDone is written without holding r.lock while
// Dequeue reads it under the lock -- confirm this race is acceptable.
func (r *RequestHeapQueue) Close() error {
	r.isDone = true
	r.waitCondition.Broadcast()
	r.waitGroup.Wait()
	return nil
}
// Clear removes all queued requests, leaving the queue empty but usable.
// Bug fix: previously the entries were zeroed without resetting count and
// without holding the lock, so a later Dequeue/extract could return a
// zero-valued heapNode with a nil request.
func (r *RequestHeapQueue) Clear() {
	r.lock.Lock()
	defer r.lock.Unlock()
	// Zero the slots so the request pointers can be garbage collected.
	for i := range r.data {
		r.data[i] = heapNode{}
	}
	r.count = 0
}
// Count returns the amount of requests in the queue.
// Takes the lock so the read cannot race with concurrent Enqueue/Dequeue,
// which mutate count under the same mutex. The error is always nil; the
// signature matches the Queue interface.
func (r *RequestHeapQueue) Count() (int, error) {
	r.lock.Lock()
	defer r.lock.Unlock()
	return r.count, nil
}
// insert a request.
// Caller must hold r.lock. Grows the backing array geometrically up to
// maxSize (returning *QueueMaxSize when full), places the node at the end,
// sifts it up by priority, and signals one blocked Dequeue. The sift-up
// compares priorities strictly, so an equal-priority newcomer stays below
// older entries, preserving FIFO order on ties.
func (r *RequestHeapQueue) insert(req *Request, priority int) error {
	node := heapNode{
		priority:       priority,
		request:        req,
		insertionCount: r.insertionCount + 1,
	}
	if r.count >= len(r.data) {
		newSize := (len(r.data) * 2) + 1
		if newSize > r.maxSize {
			if r.count == r.maxSize {
				return &QueueMaxSize{size: r.maxSize}
			}
			newSize = r.maxSize
		}
		data := make([]heapNode, newSize)
		copy(data, r.data)
		r.data = data
	}
	i := r.count
	parent := parentIndex(i)
	r.data[i] = node
	for i > 0 && r.data[i].priority > r.data[parent].priority {
		r.data[i], r.data[parent] = r.data[parent], r.data[i]
		i = parentIndex(i)
		parent = parentIndex(i)
	}
	r.count++
	r.insertionCount++
	r.waitCondition.Signal()
	return nil
}
// extract the root node and replace it with the last element, then sift down.
// Caller must hold r.lock and ensure count > 0.
func (r *RequestHeapQueue) extract() *Request {
	req := r.data[0].request
	r.count--
	r.data[0] = r.data[r.count]
	r.maxHeapify(0)
	return req
}
// Sort the heap so that the highest priority request is the root node.
// Starts from i (array index) and sifts down, swapping nodes as necessary
// along the way. Ordering is defined by less(), so priority ties keep
// FIFO order.
func (r *RequestHeapQueue) maxHeapify(i int) {
	max := i
	for {
		// get the children and set the current max value to the starting node
		left := leftChildIndex(i)
		right := rightChildIndex(i)
		// if left child is not the last node and is less than the parent node, set max to this node index
		if left < r.count && less(r.data[max], r.data[left]) {
			max = left
		}
		// same thing, but with right child
		if right < r.count && less(r.data[max], r.data[right]) {
			max = right
		}
		// stop sifting if no swap occurred, the heap is sorted
		if max == i {
			return
		}
		// if a swap occurred, swap the actual data and continue sifting into the next node
		r.data[i], r.data[max] = r.data[max], r.data[i]
		i = max
	}
}
// leftChildIndex returns the backing-array index of node i's left child.
func leftChildIndex(i int) int {
	return 2*i + 1
}

// rightChildIndex returns the backing-array index of node i's right child.
func rightChildIndex(i int) int {
	return 2*i + 2
}

// parentIndex returns the backing-array index of node i's parent,
// clamped to the root for i == 0.
func parentIndex(i int) int {
	p := (i+1)/2 - 1
	if p < 0 {
		return 0
	}
	return p
}
| 21.663866 | 100 | 0.662917 | 3.328125 |
b4e0e572c835c484c0f44eb853b3fc2721f8cb63 | 1,033 | kt | Kotlin | roboquant-core/test/feeds/TestFeedTest.kt | jetonbacaj/roboquant | 11136be3cb62870532dc7e1888c5ea88ba17bcab | [
"Apache-2.0"
] | null | null | null | roboquant-core/test/feeds/TestFeedTest.kt | jetonbacaj/roboquant | 11136be3cb62870532dc7e1888c5ea88ba17bcab | [
"Apache-2.0"
] | null | null | null | roboquant-core/test/feeds/TestFeedTest.kt | jetonbacaj/roboquant | 11136be3cb62870532dc7e1888c5ea88ba17bcab | [
"Apache-2.0"
] | null | null | null | package org.roboquant.feeds
import kotlinx.coroutines.*
import org.junit.Test
import kotlin.test.*
import org.roboquant.TestData
import org.roboquant.common.Background
import org.roboquant.common.TimeFrame
import org.roboquant.feeds.test.TestFeed
/**
 * Plays [feed] on a background IO coroutine, restricted to [timeFrame],
 * and returns the channel the events are delivered on. The channel is
 * closed once the feed has finished playing.
 */
fun play(feed:Feed, timeFrame: TimeFrame = TimeFrame.FULL): EventChannel {
    val channel = EventChannel(timeFrame = timeFrame)
    Background.ioJob {
        feed.play(channel)
        channel.close()
    }
    return channel
}
/** Unit tests for [TestFeed] played through the [play] helper. */
internal class TestFeedTest {
    @Test
    fun testTestFeed() = runBlocking{
        // A feed over 5..9 should emit exactly one event per value.
        val feed = TestFeed(5..9)
        var cnt = 0
        for (step in play(feed)) {
            cnt++
        }
        assertEquals(5, cnt)
    }
    @Test
    fun testTestFeedWithItems() = runBlocking{
        // Two price ranges (11 values each) => 22 events, all PriceActions.
        val feed = TestFeed(120..130, 130 downTo 120, asset = TestData.euStock())
        var cnt = 0
        for (step in play(feed)) {
            cnt++
            assertTrue(step.actions.first() is PriceAction)
        }
        assertEquals(22, cnt)
    }
}
af6f779fbe9f9d336e37d83d5ee4446277505939 | 1,614 | rb | Ruby | features/step_definitions/work_unit_steps.rb | AdGitHub2023/xrono | 1cdaebe285ee6115e948c7c0a43f34e2393d4b4a | [
"MIT"
] | 11 | 2015-03-16T14:45:03.000Z | 2019-02-09T08:03:41.000Z | features/step_definitions/work_unit_steps.rb | AdGitHub2023/xrono | 1cdaebe285ee6115e948c7c0a43f34e2393d4b4a | [
"MIT"
] | 3 | 2015-03-11T21:29:36.000Z | 2018-06-26T17:38:05.000Z | features/step_definitions/work_unit_steps.rb | AdGitHub2023/xrono | 1cdaebe285ee6115e948c7c0a43f34e2393d4b4a | [
"MIT"
] | 2 | 2015-12-13T17:33:51.000Z | 2019-07-21T20:20:18.000Z | Given /^I have (?:a|an) "([^\"]*)" work unit scheduled today for "([^\"]*)" hours$/ do |hours_type, hours|
WorkUnit.make(:hours_type => hours_type, :scheduled_at => Date.current, :user => @current_user, :hours => hours)
end
Then /^I should see the following work_units:$/ do |expected_work_units_table|
expected_work_units_table.diff!(find('table').all('tr').map { |row| row.all('th, td').map { |cell| cell.text.strip } })
end
When /^I create a work unit with #{capture_model}$/ do |ticket|
WorkUnit.make(:ticket => find_model!(ticket))
end
Given /^I have no work units for the previous day$/ do
@current_user.work_units.where(:scheduled_at => Date.yesterday).destroy_all
end
Given /^I have a "([^"]*)" hour work unit for yesterday with ticket "([^"]*)"$/ do |hours, ticket|
WorkUnit.make(:ticket => find_model!(ticket), :hours_type => "Normal",
:scheduled_at => 1.days.ago.beginning_of_day, :user => @current_user, :hours => hours)
end
Then /^that work unit should still have a scheduled at date of yesterday$/ do
WorkUnit.last.scheduled_at.should == 1.day.ago.beginning_of_day
end
Then /^I should see the new ticket fields$/ do
within("#on_demand_ticket") do
page.should have_css('#on_demand_ticket_name')
page.should have_css('#on_demand_ticket_description')
page.should have_css('#on_demand_ticket_estimated_hours')
end
end
Then /^there should be a ticket named "([^"]*)" with (\d+) hours$/ do |ticket_name, hours|
sleep(1)
@ticket = Ticket.where(:name => ticket_name).last
@ticket.should_not be_nil
@ticket.work_units.last.hours.should == BigDecimal(hours)
end
| 40.35 | 121 | 0.703222 | 3.0625 |
End of preview. Expand
in Data Studio
README.md exists but content is empty.
- Downloads last month
- 140