blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
sequencelengths 1
1
| author_id
stringlengths 0
212
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
fb2c64c0218df858e821204c4c485f29f4b33c74 | e0527bce5c53a196752d3a16adf50cb60754de5f | /10-How to Stop Programs Crashing Demos/3-is_square.py | 8bf01fcece7fa35279f95d25ece62fa140398965 | [] | no_license | ARWA-ALraddadi/python-tutorial-for-beginners | ddeb657f419fbc176bea273bc9fb6b88d1894191 | 21cedfc47871ca4d25c2382464c60ab0a2121205 | refs/heads/master | 2023-06-30T20:24:30.688800 | 2021-08-08T08:22:29 | 2021-08-08T08:22:29 | 193,094,651 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,066 | py | ################################################################
##
## As a demonstration of a function which applies defensive
## programming in different ways, consider a predicate
## which is intended to return True if a given natural
## number (i.e., a non-negative integer) is a square of
## another natural number.
##
## From this description the function could be "misused" in
## three ways:
##
## 1) It could be given a negative number.
## 2) It could be given a floating point number.
## 3) It could be given a value which is not a number at
## all.
##
## By adding some "defensive" code we can make a naive
## implementation more robust by responding appropriately
## to each of these cases:
##
## 1) A negative number can never be a square of another
## number, so we can always return False in this case.
## Here we choose to do so "silently", not drawing
## attention to the unexpected value at all, since the
## answer returned is still "correct" mathematically.
## 2) A positive floating point number could be a square of
## a natural number so, even though we're not required
## to handle floating point numbers we can still do so,
## but choose to generate a "warning" message in this
## case.
## 3) If the function is given a non-numerical value it
## is reasonable to assume that something is seriously
## wrong with the calling code, so in this case we
## generate an "error" message and return the special
## value None.
#---------------------------------------------------------
# Return True if the given natural number is the square of
# some other natural number
def is_square(natural_number):
    """Return True if *natural_number* is the square of a natural number.

    With the defensive checks below left commented out, the call crashes on
    negative input (math domain error from sqrt) and on non-numeric input
    (TypeError) -- uncommenting them is the point of this demo.
    """
    from math import sqrt

    # Three "defensive" checks follow
##    # Check that the parameter is a number
##    if not (isinstance(natural_number, int) or isinstance(natural_number, float)):
##        print('ERROR - parameter must be numeric, given:', repr(natural_number))
##        return None
##
##    # Check that the parameter is positive
##    if natural_number < 0:
##        return False
##
##    # Check that the parameter is a natural number
##    if isinstance(natural_number, float):
##        print('Warning - expected natural, given float:', natural_number)

    # Return True if the number's square root is a whole number
    return sqrt(natural_number) % 1 == 0
#---------------------------------------------------------
# Some tests
#
# The first of these tests is a "valid" one, but the remaining
# three all provide unexpected inputs. Uncommenting the
# "defensive" checks above will cause the function to respond
# appropriately. (It will crash until the defensive code is
# uncommented. Why?)
print(is_square(36)) # expected input
print()
print(is_square(-1)) # unexpected input, but handled silently
print()
print(is_square(225.0)) # unexpected input, handled with warning
print()
print(is_square('August')) # unexpected input, handled as an error
| [
"[email protected]"
] | |
faf3e290df2bf5118bd59fc332e9197ab6822b2c | 29d37bcb67068025a0044d4fe7c54aa07689b89b | /shop/urls.py | 410c584c3bbbed4ec94ab2de925dd72e958687e5 | [] | no_license | leonardorlopez-zz/djangoClase7 | 27b0876601cbd389ad2eeac3beb8f29ff2276655 | 376a1ae692bde1997e3a627c481772bc99085270 | refs/heads/main | 2023-02-06T13:16:35.186791 | 2020-12-29T18:47:24 | 2020-12-29T18:47:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 170 | py | from django.urls import path
from . import views
# Route table for the shop app.
urlpatterns = [
    path("", views.index, name="index"),                 # home page
    path("contacto", views.contacto, name="contacto"),   # contact page
]
"[email protected]"
] | |
ad225d31a7a7b3884219fcb3b66eb59f5d1ed90a | 6bb951740f6f82ad6458a9ccc53e08d857cef2da | /setup.py | 995fea61ed14bd92f66c92c3b6395be121ed1e4f | [] | no_license | abdusco/pictools | fefc243ea476b1531e193f6ad18e64e435d0d83d | 00cf22e6df1cc85ff497742367de6a1b71f508b4 | refs/heads/master | 2022-11-27T15:27:13.342825 | 2019-06-02T09:31:34 | 2019-06-02T09:31:34 | 136,924,655 | 1 | 0 | null | 2022-11-22T02:49:06 | 2018-06-11T12:41:11 | Python | UTF-8 | Python | false | false | 342 | py | from setuptools import setup, find_packages
# Runtime dependencies are kept in requirements.txt (one per line).
with open('requirements.txt') as f:
    required = f.readlines()

setup(
    name='pictools',
    version='0.1',
    packages=find_packages(),
    include_package_data=True,
    install_requires=required,
    # Expose the CLI entry point as the `pictools` console command.
    entry_points='''
    [console_scripts]
    pictools=pictools.cli:cli
    ''',
)
| [
"[email protected]"
] | |
8163546f2fecc94e106eccfc43d3314cb39e1bbd | 26594819e61d1a5f290bb579c5326adbfcce0373 | /training/config.py | 25dc1f6c9a640b11046d9c9ff42d75afc367f761 | [] | no_license | overminder/fyp | 50ba90987fbfc5788d4021d943eebb2027adea45 | a9fe79a5a04589ee1866981c68ff8404cc7efeba | refs/heads/master | 2021-01-23T06:26:37.631250 | 2012-05-15T07:19:32 | 2012-05-15T07:19:32 | 1,816,661 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 94 | py | from util import local_path
def get_database_path():
    """Return the path of the SQLite store file, resolved via local_path."""
    db_filename = 'store.sqlite3'
    return local_path(db_filename)
| [
"[email protected]"
] | |
948e7570c22e3a814efc70ef208bb5769a7b3ba1 | f2568af5aacdb3045f8dd20ec2fd91e395ba57d1 | /createmylvm.py | a2bcdb8ebcc803398d9d04adf65faf10eb88ceef | [] | no_license | shubhambhardwaj007/lvm-automation | e93d9efe61e9951710dc5ee6579ef01d139304e3 | e446f794fc05d1f3dac8e152d428cfc9657b817e | refs/heads/master | 2023-03-26T02:07:51.421192 | 2021-03-27T19:51:46 | 2021-03-27T19:51:46 | 352,161,993 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,070 | py |
import subprocess
def createmylv():
    """Interactively create and mount an LVM logical volume.

    Prompts for physical devices, a volume-group name, a logical-volume
    name/size and a mount point, then shells out to the LVM tools via
    subprocess.  pvcreate success is reported per device; the final mount
    is checked and reported.
    """
    # Show available block devices so the user can pick PVs.
    print(subprocess.getoutput('lsblk'))
    device = input("Choose the devices for PV separated by space in between : ").split(" ")
    for dev in device:
        pvcreate = subprocess.getstatusoutput("pvcreate {0}".format(dev))
        if pvcreate[0] == 0:
            print("{0} pv created".format(dev))
        else:
            print("{0} pv failed".format(dev))
    vgname = input("Enter VG name: ")
    joined_devices = ' '.join(device)
    vgcreate = subprocess.getstatusoutput("vgcreate {0} {1}".format(vgname, joined_devices))
    lvname = input("Enter LV name: ")
    size = input("Enter Size of LV: ")
    lvcreate = subprocess.getstatusoutput("lvcreate --size {0} --name {1} {2}".format(size, lvname, vgname))
    mount = input("Enter the mountpoint: ")
    formating = subprocess.getstatusoutput("mkfs.ext4 /dev/{0}/{1}".format(vgname, lvname))
    mount_path = subprocess.getstatusoutput("mount /dev/{0}/{1} {2}".format(vgname, lvname, mount))
    if mount_path[0] == 0:
        print("Done")
    else:
        print("Can't mount")


if __name__ == "__main__":
    # BUG FIX: the original called createlv(), an undefined name (NameError).
    createmylv()
| [
"[email protected]"
] | |
d88a957215bb44619a5082930830ded1ee446583 | 7f8aadda02ec803d35c2daa67edfd536f9663251 | /decotest.py | 742fb835e9dd7c1a70dd0ba807ca1ec0858546b4 | [] | no_license | s-tefan/python-exercises | df75c4387562db57b2ad27250b1abe7a5872f332 | a6b69775a2b4c9fe9b7d00881630209b16e9b111 | refs/heads/master | 2023-01-20T02:09:19.814742 | 2023-01-08T14:35:27 | 2023-01-08T14:35:27 | 147,692,645 | 0 | 0 | null | 2018-09-07T14:36:41 | 2018-09-06T15:13:07 | Python | UTF-8 | Python | false | false | 289 | py | def my_decorator(func):
def mupp():
print("Start!")
func()
print("Mål!")
return mupp
def sägnåt():
    print("Raj!")

# Call undecorated, then wrap manually with my_decorator and call again.
sägnåt()
sägnåt = my_decorator(sägnåt)
sägnåt()

# Same wrapping, this time using decorator syntax.
@my_decorator
def sägnåtannat():
    print("Nåt annat!")

sägnåtannat()
| [
"[email protected]"
] | |
2cf1cde00eea109a46c3e5983b4906feef72866f | f0856e60a095ce99ec3497b3f27567803056ac60 | /keras2/keras66_gradient2.py | 0e0d0cc1f27912ef32b11753f760a7606dd315f8 | [] | no_license | hjuju/TF_Study-HAN | dcbac17ce8b8885f5fb7d7f554230c2948fda9ac | c0faf98380e7f220868ddf83a9aaacaa4ebd2c2a | refs/heads/main | 2023-09-04T09:13:33.212258 | 2021-10-27T08:00:49 | 2021-10-27T08:00:49 | 384,371,952 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 479 | py | import numpy as np
import matplotlib.pyplot as plt
# Objective f(x) = x^2 - 4x + 6; its minimum is where the gradient is zero.
f = lambda x: x**2 - 4 * x + 6
gradient = lambda x: 2*x - 4  # derivative of f -> the point where it is 0 is the lowest point, the one we seek

x0 = 0.0             # starting iterate
MaxIter = 20         # number of gradient-descent steps
learning_rate = 0.25

print("step\tx\tf(x)")
print("{:02d}\t{:6.5f}\t{:6.5f}".format(0, x0, f(x0)))

# Plain gradient descent: step against the gradient, print each iterate.
for i in range(MaxIter):
    x1 = x0 - learning_rate * gradient(x0)
    x0 = x1
    print("{:02d}\t{:6.5f}\t{:6.5f}".format(i+1, x0, f(x0)))
| [
"[email protected]"
] | |
bde86714c9e9dcc484f3f18212f3921c491fe222 | e50ba4cc303d4165bef9e2917103c084cfbe0e07 | /rating_app/migrations/0016_auto_20201129_1156.py | 25f2b5ff3130d55f5d492b5c185861041cf00086 | [
"MIT"
] | permissive | Antony-me/Ratemyapp | 09049fce54d3a3ed2b256970e7840d20942e8c84 | e547fea82439a3e4f83aa78bf16f93b1ea9ab00b | refs/heads/main | 2023-01-28T16:52:58.635646 | 2020-12-01T16:49:07 | 2020-12-01T16:49:07 | 316,425,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 477 | py | # Generated by Django 3.1.3 on 2020-11-29 11:56
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Alter ProfileMerch.projects into a ForeignKey to rating_app.Post (CASCADE on delete)."""

    dependencies = [
        ('rating_app', '0015_profilemerch'),
    ]

    operations = [
        migrations.AlterField(
            model_name='profilemerch',
            name='projects',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rating_app.post'),
        ),
    ]
| [
"[email protected]"
] | |
b99009f7ab3b4c4fc853d63389cf5996c75ea71c | 745d6b6ef2d232034490b0ed31589c5177523fa2 | /eipl_predicates/libs/semantic.py | d7cbb7593d4e1303321aa417578a18c384874ac8 | [] | no_license | fruether/SLEProject | 7df62a5106d8ad2ddc57f18ddde9b27ec591e9f0 | 115e3baf3e1859f78fb70322429628f07dc14f66 | refs/heads/master | 2021-01-10T11:07:12.131624 | 2016-03-28T12:34:26 | 2016-03-28T12:34:26 | 48,611,916 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 957 | py | __author__ = 'freddy'
scope_stack = []
node_context = {}
import libs.variables
def remove_scope():
    """Pop the innermost scope name off the module-level scope stack."""
    global scope_stack
    scope_stack.pop()
def add_scope(name):
    """Push *name* as the new innermost scope."""
    global scope_stack
    scope_stack.append(name)
def get_current_scope():
    """Return the name of the innermost (most recently pushed) scope."""
    global scope_stack
    return scope_stack[-1]
def init():
    """Reset the scope stack to empty."""
    global scope_stack
    scope_stack = []
def add_context(terminal, value):
    """Append *value* to the list of context values recorded for *terminal*."""
    # setdefault replaces the membership test + double lookup of the original;
    # `global` is unnecessary since the dict is only mutated, never rebound.
    node_context.setdefault(terminal, []).append(value)
def terminal_list(terminal):
    """Return the values recorded for *terminal*, or an empty list if none."""
    # dict.get does a single lookup instead of the original's
    # membership test + subscript; `global` is unnecessary for reads.
    return node_context.get(terminal, [])
def release_node():
    """Discard all per-node context recorded so far."""
    global node_context
    node_context = {}
def exec_block(terminal, prefix, value):
    """Record *value* for *terminal*, publish the accumulated list as an
    attribute named ``terminal + prefix`` on libs.variables, and return
    *value* unchanged."""
    variable = terminal + prefix
    add_context(terminal, value)
    leftSide = terminal_list(terminal)
    # Mirror the full list of values seen for this terminal onto the shared
    # variables module so later stages can read it by attribute name.
    setattr(libs.variables, variable, leftSide)
    return value
| [
"[email protected]_w_724v_09011603_00_018"
] | [email protected]_w_724v_09011603_00_018 |
0fb9f41fc4b217aaea3c263afdd6301cfd1d06a3 | 06cd8cb6ecfd10359b86127bf8f5204c3d6a860c | /umich/python_for_everybody/python_web_data/web_week2.py | a4962d0e81c55728ebf7fab7cca477fc76a34699 | [] | no_license | nikmuhammadnaim/coursera | 8254c33cec834551f72460c5e1c75810bd94c5d8 | 9a150350b685fcb236e40bcf7372c4adfa16ce80 | refs/heads/master | 2020-05-16T03:33:51.346908 | 2019-11-08T08:39:56 | 2019-11-08T08:39:56 | 182,729,747 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 470 | py | import re
# We have to solve for actual_file
sample_file = 'data/regex_sum_42.txt'
actual_file = 'data/regex_sum_233803.txt'

# Extract every run of digits from each file and sum them as integers.
with open(sample_file) as content:
    sample_answer = re.findall('[0-9]+', content.read())
print('Sample answer: {:,}'.format(sum([int(ans) for ans in sample_answer])))

with open(actual_file) as content:
    actual_answer = re.findall('[0-9]+', content.read())
print('Actual answer: {:,}'.format(sum([int(ans) for ans in actual_answer])))
| [
"[email protected]"
] | |
d44bbb217114c0831167824d694d57c29ab86665 | e3f3f911019ac126d01c056eafc7c3183107a5af | /Traffic Sign Detection/all_signs_combined/src/predict.py | 19ed9a428015b625610be9930dfee35938fb451b | [] | no_license | uncctrafficsigndetection/Traffic-Sign-Detection | 595258766f865c4b3c628b002d7b93a774168a9b | 3ff4be52357f4b6340fef94124f8c835ab66fd8a | refs/heads/master | 2020-04-09T20:28:33.910961 | 2018-12-05T21:29:50 | 2018-12-05T21:29:50 | 160,574,509 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 959 | py | import numpy as np
import time
from sample_model import Model
from data_loader import data_loader
from generator import Generator
# Saved-model checkpoint directory and a single sample image to classify.
checkpoint_dir = 'tf_data/sample_model'
X = 'C:/Users/Karthick/Desktop/cvproject/data/5/00000_00000.ppmspeed_2_.ppm'

M = Model(mode = 'test')  # build the model in inference mode
yhat = M.predict(X = X, checkpoint_dir = checkpoint_dir)
# save_dir="C:/Users/Karthick/Desktop/cvproject/speedlimitckp/"
# #saver = tf.train.Saver()
# sess = tf.Session()
# saver = tf.train.import_meta_graph('C:/Users/Karthick/Desktop/cvproject/src/tf_data/sample_model/model_epoch70.ckpt.meta')
# saver.restore(sess,tf.train.latest_checkpoint('C:/Users/Karthick/Desktop/cvproject/src/tf_data/sample_model/'))
# #checkpoint_name = tf.train.latest_checkpoint(save_dir)
# #saver.restore(sess, checkpoint_name)
# yhat_numpy = sess.run(yhat, feed_dict = {X : X, keep_prob: 1.0})
# print(yhat_numpy)
# #C:/Users/Karthick/Desktop/cvproject/src/tf_data/sample_model | [
"[email protected]"
] | |
f6fece3b5719a65008ae0fbe700a817b469a7a51 | e7eff96df8160d3c238bf38068c99c7b8bd3005b | /norman/web/frontend/crops.py | 08fa8b6415e718d05231de41cdbcfc0273dddb39 | [] | no_license | sumansai14/norman | 62c3760b47f15bb474786ac045efad5aff757b95 | 43a8c4e53830d57eb552c3ecb98bf2926c9d0457 | refs/heads/master | 2021-03-16T07:57:17.076408 | 2017-05-23T07:36:37 | 2017-05-23T07:36:37 | 92,188,183 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 170 | py | from norman.web.frontend.base import BaseAuthTemplateView
class OrganizationCropsListView(BaseAuthTemplateView):
    """Template view for the organization crops list page (the base class
    name suggests it is auth-gated -- confirm against BaseAuthTemplateView)."""
    template_name = 'norman/organization/crops_list.html'
| [
"[email protected]"
] | |
27f8f09695592ba88a397e2f6ad8643602214b9a | 40f3df53f36624d066d606743fd3f155e3d650f2 | /09fizzbuzz.py | 854e259a6391bf3dc67b756943451a5873dcabea | [] | no_license | hunsoo0823/python_study | b72f348074f8d384279a86bcd27bd924632dc5ff | b30394c8abb20a0204eda64140596962d60676cd | refs/heads/master | 2023-05-05T11:28:57.465830 | 2021-05-23T10:01:29 | 2021-05-23T10:01:29 | 288,706,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 286 | py | """
1. 3의 배수 Fizz
2. 5의 배수 Buzz
3. 15의 배수 Fizz,Buzz
"""
def fizzBuzz(i):
    """Print Fizz/Buzz/FizzBuzz for multiples of 3/5/15, otherwise print i."""
    words = ""
    if i % 3 == 0:
        words += "Fizz"
    if i % 5 == 0:
        words += "Buzz"
    print(words if words else i)
for i in range(1,100+1):
fizzBuzz(i) | [
"[email protected]"
] | |
92d58b3f75523be1e3fc4fa915f34a24e9720189 | 91992db8c28646f0aae3393e8bbe0af66c0144ea | /ejercicios python-1/suma de matrices.py | cc1b9cf09bdbd74fcc4d8a4b70bad6709c117760 | [] | no_license | Pato38/EDI | 626e33247c4e8850dc29dfe55af45b7f0fbad2ee | 04f244c3b859f2c2a566c3670f21fb9f606d18ab | refs/heads/master | 2021-07-16T16:07:42.499775 | 2020-07-23T15:05:16 | 2020-07-23T15:05:16 | 194,303,248 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,040 | py | #Se desea saber si la suma de todas las posiciones de la matriz m_1
#es igual a la suma de todas las posiciones de la matriz m_2
#se sabe q ambas son de 8x10 y contiene nros reales en el intervalo 0-100
#considerando dos digitos decimales. Imprimir un msje apropiado e informe la
#situación.Analizar y contestar:
#a)que entrada se requiere?(tipo y cantidad)
#b)cuál es la salida deseada?(tipo y cantidad)
#c)que métodos produce la salida deseada?
m_1=[None]*4
m_2=[None]*4
def carga(dim_f, dim_c):
    """Read a matrix from the keyboard, one float per position.

    NOTE(review): the outer loop runs over dim_c and the inner over dim_f,
    so the result has dim_c rows of dim_f values each -- confirm the
    parameter order matches what the callers intend.
    """
    m = []
    for c in range(dim_c):
        m.append([])
        for f in range(dim_f):
            m[c].append(float(input("ingrese el numero para la posición ["+str(c)+"]["+str(f)+"]: ")))
    return m
m_1=carga(2,2)
m_2=carga(2,2)
print(m_1)
print(m_2)
def suma(matriz, dim_c, dim_f):
    """Return the sum of the first dim_c rows x dim_f columns of *matriz*."""
    total = 0
    for fila in range(dim_c):
        for columna in range(dim_f):
            total += matriz[fila][columna]
    return total
suma_m_1=suma(m_1,2,2)
suma_m_2=suma(m_2,2,2)
if suma_m_1==suma_m_2:
print("la suma de las matrices es igual")
else:
print("la suma de las matriz es diferente")
| [
"[email protected]"
] | |
cef0da0725eb5e86efd05782852d6e6134a53ab6 | 6a87bb89a8e34d911eceee2c0f6072b3e6533ba2 | /exercise/stats.py | 0236ffaa8d25187c605050ae6181e198d507876f | [] | no_license | 1baikai/python_knowledge | 1d78ea8c45e58e7f86cc42f0d2691809e1364dca | f26b41874618622e709990110c65830d81d334cf | refs/heads/master | 2020-03-30T00:43:42.833665 | 2018-09-27T07:10:07 | 2018-09-27T07:10:07 | 150,540,602 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 482 | py | # stats.py
def median(L):
    """Sort *L* in place, print the sorted list, then print its median.

    For an even count the median is the mean of the two middle elements;
    for an odd count it is the middle element.
    """
    L.sort()
    print(L)
    n = len(L)  # BUG FIX: was L.count(L), which is always 0
    if n % 2 == 0:
        # Even count: average the two middle elements.
        # BUG FIX: integer // indexing (a/2 is a float in py3) and the
        # correct pair is positions n//2-1 and n//2, not n//2 and n//2+1.
        x = (L[n // 2 - 1] + L[n // 2]) / 2
        print(x)
    else:
        # Odd count: the middle element (0-based index n//2).
        x = L[n // 2]
        print(x)

L = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 11, 15, 34, 6, 3, 5, 6, 6]
median(L)
# def mode(L):
# d={}
# for i in L:
# if i not in d:
# d[i]=1
# else:
# d[i]+=1
# return max(d)
# def mean(L):
# a=sum(L)
# avr=a/len(L)
# return avr
| [
"[email protected]"
] | |
81b4f20e477ab12da83b102307af3b041965f606 | ae4bbc0548754a505d9874d20cacaad92e48c576 | /basic/ep9_oop/warriors.py | 3c56ea8a0bf8c355fa7f4053265af2b2c49298e1 | [] | no_license | ramanathanaspires/learn-python | 7bc50c5022861ea4eba23480ba58f3e66c24f9eb | bb42cffb20ab363610dd6d667ed2c072f88c322c | refs/heads/master | 2021-05-05T11:33:17.038891 | 2018-01-09T22:05:20 | 2018-01-09T22:05:20 | 118,204,801 | 0 | 1 | null | 2018-01-20T03:27:30 | 2018-01-20T03:27:30 | null | UTF-8 | Python | false | false | 1,683 | py | import math
import random
# Warrior & Battle Class
class Warrior:
    """A combatant with health plus randomized attack and block strength."""

    def __init__(self, name="Warrior", health=0, atkMax=0, blockMax=0):
        self.name = name
        self.health = health
        self.atkMax = atkMax
        self.blockMax = blockMax

    def attack(self):
        """Return a random attack amount in [0.5, 1.5) * atkMax."""
        return self.atkMax * (random.random() + .5)

    def block(self):
        """Return a random block amount in [0.5, 1.5) * blockMax."""
        return self.blockMax * (random.random() + .5)
class Battle:
    """Drives a fight: two warriors alternate attacks until one dies."""

    def startFight(self, warrior1, warrior2):
        """Alternate attacks (warrior1 strikes first) until an attack kills."""
        while True:
            if self.getAttackResult(warrior1, warrior2) == "Game Over":
                print("Game Over")
                break
            if self.getAttackResult(warrior2, warrior1) == "Game Over":
                print("Game Over")
                break

    @staticmethod
    def getAttackResult(warriorA, warriorB):
        """Resolve one attack of A on B and return "Game Over" if B dies.

        Damage is ceil(attack - block); when the block exceeds the attack
        the damage is negative, so the defender actually regains health.
        """
        warriorAAttkAmt = warriorA.attack()
        warriorBBlockAmt = warriorB.block()
        damage2WarriorB = math.ceil(warriorAAttkAmt - warriorBBlockAmt)
        warriorB.health = warriorB.health - damage2WarriorB
        print(f"{warriorA.name} attacks {warriorB.name} and deals {warriorAAttkAmt} damage.")
        print(f"{warriorB.name} is down to {warriorB.health} health")
        if warriorB.health <= 0:
            print(f"{warriorB.name} has died and {warriorA.name} is victorious.")
            return "Game Over"
        else:
            return "Fight Again"
def main():
    """Create two evenly matched warriors and run a battle to the death."""
    maximus = Warrior("Maximus", 50, 20, 10)
    leonidas = Warrior("Leonidas", 50, 20, 10)
    battle = Battle()
    battle.startFight(maximus, leonidas)

if __name__ == '__main__':
    main()
"[email protected]"
] | |
492c142cb1d23510b7a9f1d4f7229fdbf87f824a | 76357a33888662ee16fd90cebf83521d952ba9c1 | /stringpattern/strpattern9.py | 6933820726f14879329b7dee3d0e80d1dfa59788 | [] | no_license | mukesh25/python-mukesh-codes | 10c287ea2e566c55539b925e765c38f05964d84f | 819f289418b7d41c17e52fc7a749d2c89ea7f9ca | refs/heads/master | 2022-12-18T03:30:06.917785 | 2020-09-18T04:19:44 | 2020-09-18T04:19:44 | 296,227,647 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 461 | py | #Assume input string contains only alphabet symblols and diits.
#write a program to sort characters of the string, first alphabet symblols followed by digits?
# input = 'B4A1D3'
# output = 'ABD134'
s = input('enter some alphanumeric String to sort: ')

alphabets = []
digits = []

# Partition the characters: letters in one list, digits in the other.
for ch in s:
    if ch.isalpha():
        alphabets.append(ch)
    else:
        digits.append(ch)

#print(alphabets)
#print(digits)

# Letters first (sorted), then digits (sorted), e.g. 'B4A1D3' -> 'ABD134'.
output = ''.join(sorted(alphabets)+sorted(digits))
print(output)
"[email protected]"
] | |
1ee177ef1e0fb91c84caaab0b1cb15bebb0cf5c5 | ff240973859bbcfb24ff6225dd1a3cc67a475be2 | /eventFinderApp/filters.py | 46f0810f55ace99d445e13ef0bced37443a21f94 | [] | no_license | jojo-baker/eventfinda-app | 53c97b35b9ecf82bacf3f6115a927283fe76cec4 | ad027101a34a2a2878463047913e96f90e1d4d56 | refs/heads/master | 2022-12-06T01:17:19.584689 | 2019-10-05T04:21:20 | 2019-10-05T04:21:20 | 206,299,470 | 0 | 1 | null | 2022-11-22T04:16:58 | 2019-09-04T11:05:17 | Python | UTF-8 | Python | false | false | 446 | py | import django_filters
from .models import Event
from django.contrib.auth.models import Group
from django import forms
from django.forms import ModelForm, SplitDateTimeField
from django.contrib.admin import widgets
class EventFilter(django_filters.FilterSet):
    """Filter set for Event listings by category, location and start time."""

    # NOTE(review): SplitDateTimeField is a *form* field, not a django_filters
    # filter; declared on a FilterSet it will not filter results by itself.
    # Presumably a DateTimeFilter using this widget was intended -- confirm.
    start_time = SplitDateTimeField(widget=widgets.AdminSplitDateTime())

    class Meta:
        model = Event
        fields = ('categories', 'location', 'start_time')
| [
"“[email protected]”"
] | |
afe8e5aa19c0bea3a0e13f554987ea075ac9105d | c899829ec2aa4fe528d530a3dd3c77937e04d25e | /BigO/GoogleInterviewExample.py | 8789a9d19c94d85c10710bcc66235cb99380c2b9 | [] | no_license | DavidKNam/DataStructuresAndAlgorithms | aaa57eb44c998776c23a2b75124bba9c886d30f9 | 47554493b3f8325341832cf66db693139f0370da | refs/heads/master | 2023-02-10T05:54:55.702221 | 2021-01-08T05:43:24 | 2021-01-08T05:43:24 | 325,725,412 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 231 | py | def findPairSum(array, sum):
    """Return True if any two elements of *array* add up to *sum*.

    Single pass with a set of needed complements: when the current value
    was previously recorded as a complement, a matching pair exists.
    """
    pair_set = set()
    for value in array:
        if value in pair_set:
            return True
        else:
            # Remember the complement this value would need.
            pair_set.add(sum - value)
    return False
print(findPairSum([1,2,4,4], 8)) | [
"[email protected]"
] | |
fd73e43fd23cb6b607c875f26459c5aa8d8c3e71 | d3d61e9b8cbdd82015ac64ad25f227603f80bd0b | /academy/urls.py | 60337c0b3cc8513ac88985021ae283cd61c13d79 | [] | no_license | Arnastria/icl-academy | e1304bd5df4172483f841c8054e59854a9cc9dff | 0cf1f11a27f16283cbfae8f856769494fbc84ab3 | refs/heads/master | 2023-07-14T05:49:22.845966 | 2021-08-15T14:41:30 | 2021-08-15T14:41:30 | 396,381,971 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | from django.urls import path
from . import views
# URL routes for the academy app.
urlpatterns = [
    path('dashboard/', views.dashboard),
    path('login/', views.login_me),
    path('logout/', views.logout_me),
    path('form-registration/', views.form_registration),
    path('', views.landing, name='index'),  # landing page
]
| [
"[email protected]"
] | |
6b40a94b9d1873941b7d43f3eaef484e9ecd0b37 | 1504f499c2985a5ea0abdc974ebd04a2c5e82ae3 | /equalities/genPB_diag_spec.py | 81ba447eb9aff969cb01a7f0b8cc1fadb12e911c | [] | no_license | vliew/nonlinear | 658b0eb4040ec4879a0316c21c25b4641053a0fb | 74e36a20adf9b6ead88f482f61eb786f88c09965 | refs/heads/master | 2022-07-29T13:20:04.350417 | 2020-08-30T03:50:17 | 2020-08-30T03:50:17 | 287,128,574 | 0 | 1 | null | 2020-08-30T03:50:19 | 2020-08-12T22:35:15 | Python | UTF-8 | Python | false | false | 2,179 | py | from circuits_pb import *
import sys
import os
def writeDIAG_SPEC(numBits, greater_than=True):
    """Generate a pseudo-Boolean benchmark comparing the diagonal-sum product
    encoding against an array-multiplier encoding of x*y.

    Writes a .key file (variable map) and a .opb file (constraints) into
    benchmarks/, with the inequality direction chosen by *greater_than*.
    """
    # Map from columns to the constraints on those columns.
    # We will fill col_constraints with the full set of constraints.
    # Constraints in column -1 will always be included.
    col_constraints = {}
    for i in range(-1, 2*numBits+2):
        col_constraints[i] = []

    # Choose filename based on settings
    keyFileName = "benchmarks/%ddiag_spec" % (numBits)
    opbFileName = "benchmarks/%ddiag_spec" % (numBits)
    if greater_than:
        keyFileName += "_greater"
        opbFileName += "_greater"
    else:
        keyFileName += "_less"
        opbFileName += "_less"
    keyFileName += ".key"
    opbFileName += ".opb"

    f_key = open(keyFileName, 'w')
    f_opb = open(opbFileName, 'w')
    try:
        # Initialize variable maps
        nextDIMACS = 1

        # Create the variables
        x, nextDIMACS = createBITVECTOR(nextDIMACS, f_key, 'x', numBits)
        y, nextDIMACS = createBITVECTOR(nextDIMACS, f_key, 'y', numBits)
        t, nextDIMACS = createGRID_VARIABLES(nextDIMACS, f_key, "t", numBits, numBits)
        c, nextDIMACS = createGRID_VARIABLES(nextDIMACS, f_key, "c", numBits+1, numBits)
        d, nextDIMACS = createGRID_VARIABLES(nextDIMACS, f_key, "d", numBits+1, numBits)
        xy, nextDIMACS = createBITVECTOR(nextDIMACS, f_key, 'xy', 2*numBits)
        xy2, nextDIMACS = createBITVECTOR(nextDIMACS, f_key, 'xy2', 2*numBits)

        writeDIAGSUM(col_constraints, c, d, t, xy, numBits)
        writeALGEBRA_ARRAYMULT(col_constraints, x, y, t, xy2, numBits)

        if greater_than:
            # inequality xy > xy2
            writeBIGGER_NUMBER(col_constraints, 0, xy, xy2, 2*numBits)
        else:
            # inequality xy < xy2
            writeSMALLER_NUMBER(col_constraints, 0, xy, xy2, 2*numBits)

        # Write all constraints to file
        writeHEADER(f_opb, nextDIMACS, col_constraints)
        for col in col_constraints:
            for clause in col_constraints[col]:
                f_opb.write(clause)
    finally:
        # BUG FIX: f_key was never closed in the original; close both files
        # even if a helper raises part-way through.
        f_opb.close()
        f_key.close()
if __name__ == '__main__':
    # Ensure the output directory exists, then emit both benchmark variants.
    if not os.path.exists("benchmarks"):
        os.makedirs("benchmarks")
    numBits = int(sys.argv[1])      # bit-width taken from the command line
    writeDIAG_SPEC(numBits, True)   # xy > xy2 variant
    writeDIAG_SPEC(numBits, False)  # xy < xy2 variant
| [
"[email protected]"
] | |
c5c7f71bfb61f86110b05a007c1a8ff026418970 | 5fe2c3e946bbae8c8aef3cac07cce8652a0e4404 | /accounts/views.py | 420829c11c0539562a0f0b511f27a4ec27580886 | [] | no_license | neilduncan/learning-django | 1ec72f1f1a90863ca1f843e4739336074e3af855 | 631e8621d3bffc4ea4c886bcaed25626d893e10b | refs/heads/master | 2020-03-26T17:10:48.959673 | 2018-08-23T16:19:17 | 2018-08-23T16:19:17 | 145,145,977 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 466 | py | from django.contrib.auth import login as auth_login
from django.shortcuts import render, redirect
from .forms import SignUpForm
def signup(request):
    """Register a new user account.

    On a valid POST the user is created, logged in immediately and redirected
    to 'home'.  On GET a blank form is shown; an invalid POST falls through
    with the bound form so validation errors are re-rendered.
    """
    if request.method == 'POST':
        form = SignUpForm(request.POST)
        if form.is_valid():
            user = form.save()
            auth_login(request, user)
            return redirect('home')
    else:
        form = SignUpForm()
    return render(request, 'signup.html', {'form': form})
| [
"[email protected]"
] | |
05ee03d0fc92901d67b69090b7700728099ddbf5 | 48bb7b034ddac0858c649765530f028c15bb79de | /intelligence_agency/intelligence_agency/spiders/cia.py | 8cd6aeb1b40f276a735c3c38c57501262eef2be8 | [] | no_license | eduardoromeror9/ProyectoScrapy | 1abb59ed52206563dcf00bc7799fb6e84a320b56 | dca7680bc808760b3eb19d129b15101d5e598799 | refs/heads/master | 2023-03-12T21:57:20.089169 | 2021-03-02T11:31:03 | 2021-03-02T11:31:03 | 343,748,935 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,451 | py | import scrapy
# XPath expreciones links, titulos y cuerpo.
# Links = //a[starts-with(@href, collection) and (parent::h3|parent::h2)]/@href
# Title = //h1[@class="documentFirstHeading"]/text()
# Paragraph = //div[@class="field-item even"]//p[not(@class)]/text()
class SpiderCIA(scrapy.Spider):  # Spider attributes: name, start links and settings.
    """Scrape titles and first paragraphs of the CIA reading-room historical collections."""
    name = 'cia'
    start_urls = [
        'https://www.cia.gov/readingroom/historical-collections'
    ]
    custom_settings = {
        'FEED_URI': 'cia.json',
        'FEED_FORMAT': 'json',
        'FEED_EXPORT_ENCODING': 'utf-8'
    }

    def parse(self, response):
        """Follow each collection link found on the index page."""
        # Expression to extract the links with the getall() method.
        # NOTE(review): `collection` is unquoted inside starts-with(), so it
        # is evaluated as a node test (an empty node-set -> ''), and
        # starts-with(@href, '') matches every href under h2/h3.  Presumably
        # starts-with(@href, 'collection') was intended -- confirm.
        links_declassified = response.xpath('//a[starts-with(@href, collection) and (parent::h3|parent::h2)]/@href').getall()
        for link in links_declassified:
            yield response.follow(link, callback=self.parse_link, cb_kwargs={'url': response.urljoin(link)})  # extracts the title and description of the declassified file

    def parse_link(self, response, **kwargs):
        """Yield the url, title and first body paragraph of a collection page."""
        link = kwargs['url']
        title = response.xpath('//h1[@class="documentFirstHeading"]/text()').get()
        paragraph = response.xpath('//div[@class="field-item even"]//p[not(@class)]/text()').get()
        yield {
            'url': link,
            'title': title,
            'body': paragraph
        }
"[email protected]"
] | |
3aa12a0346cfb567a5fa45ab2f0e54d742a93c46 | c078294763f3b12ee9eb4d646b1dec988f261597 | /c-tools/phase-averages/collision-freq/plt/collision_freq.py | fa5abeac4e910d28621d89949011486fc1696bf1 | [
"Apache-2.0"
] | permissive | dwille/bbtools | a2d224456f87ace17863042a96a210fac564a11d | a5b366862d6543c7c6cc1e36b7c1e46b9f55339e | refs/heads/master | 2021-01-17T04:08:05.395250 | 2018-04-06T11:55:59 | 2018-04-06T11:55:59 | 41,551,749 | 0 | 1 | null | 2016-03-03T15:59:41 | 2015-08-28T14:30:39 | Matlab | UTF-8 | Python | false | false | 2,718 | py | #!/usr/bin/env python2
from setup import *
# Sims that we have data for
simdirs = glob.glob(datadir)
nsims = int(len(simdirs))

# Data arrays -- [rho, nparts]
data = np.empty([rho_star.size, nparts.size])
data.fill(np.nan)

# Pull data
for cc, currdir in enumerate(simdirs):
    # Pull data, cut out first time step
    time = np.genfromtxt(currdir, skip_header=1, usecols=0)[1:]
    ncolls = np.genfromtxt(currdir, skip_header=1, usecols=1)[1:]
    freq =(ncolls[-1] - ncolls[0]) / (time[-1] - time[0])

    # nparts and rho are encoded in the directory path components
    curr_nparts = int(currdir.split("/")[5])
    curr_rho = float(currdir.split("/")[6][3:])

    # Find an index to store in data array and store
    pp = np.argwhere(curr_rho == rho_star)
    nn = np.argwhere(curr_nparts == nparts)

    print("rho = %.1lf, nparts = %d, freq = %lf" % (curr_rho, curr_nparts, freq))

    data[pp, nn] = freq

# NOTE(review): this sys.exit() makes everything below unreachable -- it looks
# like leftover debugging; remove it to actually produce the plots.
sys.exit()

# colors for plotting -- still ungeneral but better than in tetrads
baseColors = ['r', 'g', 'b', 'k']
baseShades = [0.4, 0.57, 0.74, 0.9]
colors = ['']*nsims
shades = ['']*nsims
for cc, currdir in enumerate(simdirs):
    # Break directory string to grab nparts and rho
    curr_nparts = int(currdir.split("/")[5])
    curr_rho = float(currdir.split("/")[6][3:])

    # Different color for different volume fractions
    for nn, n_check in enumerate(nparts):
        if (curr_nparts == n_check):
            colors[cc] = baseColors[nn]

    # Different intensities for different density ratios
    for pp, p_check in enumerate(rho_star):
        if (curr_rho == p_check):
            shades[cc] = baseShades[pp]

# plot
fig1 = plt.figure(figsize=(4,6))

# Constant volume fraction, changing density ratio
ax1 = fig1.add_subplot(211)
plt.plot(rho_star, data[:,0], 'o--')
plt.plot(rho_star, data[:,1], 'o--')
plt.plot(rho_star, data[:,2], 'o--')
plt.plot(rho_star, data[:,3], 'o--')

plt.legend([r"$\phi = 0.087$",r"$\phi = 0.175$",r"$\phi = 0.262$",r"$\phi = 0.349$"],
  loc="upper right", framealpha=0.6)
plt.xlabel(r"$\rho^*$")
plt.ylabel(r"collisional frequency, $n_{coll}/ms$")
plt.xlim([1,6])

## Constant density ratio, changing volume fraction
ax2 = fig1.add_subplot(212)
plt.loglog(vfrac, data[0,:], 'o--')
plt.loglog(vfrac, data[1,:], 'o--')
plt.loglog(vfrac, data[2,:], 'o--')
plt.loglog(vfrac, data[3,:], 'o--')

plt.legend([r"$\rho^* = 2.0$",r"$\rho^* = 3.3$",r"$\rho^* = 4.0$",r"$\rho^* = 5.0$"],
  loc="lower right")
plt.xlabel(r"$\phi$")
plt.ylabel(r"collisional frequency, $n_{coll}/ms$")
plt.xlim([.05,1])
plt.ylim(ymax=125)

# Reference slope-3 guide line for the log-log panel
xpts = [.07, .50]
ypts = 1500*np.power(xpts, 3.)
print xpts
print ypts
plt.plot(xpts, ypts, 'k--')
plt.text(.07, .3, r"slope=3")

# save
imgname = imgdir + "coll_freq"
print "Saving figure to %s" % imgname
plt.savefig(imgname + ".png", bbox_inches="tight", format='png')
| [
"[email protected]"
] | |
af5ee455cb7393efd56233ca1556032ce3b6435c | 4c68778814b938d91d184749b50940549439c0f3 | /scheme/fields/time.py | fe6e0bb58b391be8c8074c6fe7792ac82fede471 | [
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jordanm/scheme | 96a747258ce68de756ffe7996b37c3e8747a740c | 5a87e24b35bb2f80b474273bf2e5c5fd563214e0 | refs/heads/master | 2021-01-17T05:48:51.479427 | 2020-01-20T16:03:28 | 2020-01-20T16:03:28 | 32,604,302 | 8 | 4 | NOASSERTION | 2020-01-20T16:03:29 | 2015-03-20T20:05:12 | Python | UTF-8 | Python | false | false | 3,174 | py | from __future__ import absolute_import
from datetime import time
from time import strptime
from scheme.exceptions import *
from scheme.field import *
__all__ = ('Time',)
class Time(Field):
    """A field for time values.

    Serializes ``datetime.time`` values to/from ``'HH:MM:SS'`` strings and
    validates them against optional ``minimum``/``maximum`` bounds. The class
    attributes below (``basetype``, ``parameters``, ``errors``) are consumed
    by the scheme ``Field`` framework — presumably for schema description and
    error construction; confirm against ``scheme.field.Field``.
    """

    basetype = 'time'
    equivalent = time
    # Framework-declared parameters and their defaults.
    parameters = {'maximum': None, 'minimum': None}
    # strftime/strptime format used for (un)serialization.
    pattern = '%H:%M:%S'

    errors = [
        FieldError('invalid', 'invalid value', '%(field)s must be a time value'),
        FieldError('minimum', 'minimum value', '%(field)s must not occur before %(minimum)s'),
        FieldError('maximum', 'maximum value', '%(field)s must not occur after %(maximum)s'),
    ]

    def __init__(self, minimum=None, maximum=None, **params):
        """Accept bounds either as ``datetime.time`` or ``'HH:MM:SS'`` strings.

        Raises:
            TypeError: if a bound is neither None, a time, nor a parseable string.
        """
        super(Time, self).__init__(**params)
        # Normalize string bounds to datetime.time via the same parser used
        # for field values, so bounds and values compare consistently.
        if maximum is not None:
            try:
                maximum = self._unserialize_value(maximum)
            except InvalidTypeError:
                raise TypeError("argument 'maximum' must be either None, a datetime.time,"
                    " or a string in the format 'HH:MM:SS'")
        if minimum is not None:
            try:
                minimum = self._unserialize_value(minimum)
            except InvalidTypeError:
                raise TypeError("argument 'minimum' must be either None, a datetime.time,"
                    " or a string in the format 'HH:MM:SS'")
        self.maximum = maximum
        self.minimum = minimum

    def __repr__(self):
        # Only mention bounds that are actually set.
        aspects = []
        if self.minimum is not None:
            aspects.append('minimum=%r' % self.minimum)
        if self.maximum is not None:
            aspects.append('maximum=%r' % self.maximum)
        return super(Time, self).__repr__(aspects)

    def describe(self, parameters=None, verbose=False):
        """Describe this field, rendering bounds back to 'HH:MM:SS' strings."""
        params = {}
        if self.maximum is not None:
            params['maximum'] = self.maximum.strftime(self.pattern)
        if self.minimum is not None:
            params['minimum'] = self.minimum.strftime(self.pattern)
        return super(Time, self).describe(parameters=parameters, verbose=verbose, **params)

    def _serialize_value(self, value):
        # time -> 'HH:MM:SS'
        return value.strftime(self.pattern)

    def _unserialize_value(self, value, ancestry=None):
        """Parse a 'HH:MM:SS' string into a time; pass times through unchanged."""
        if isinstance(value, time):
            return value
        try:
            # strptime yields a struct_time; fields [3:6] are (hour, min, sec).
            return time(*strptime(value, self.pattern)[3:6])
        except Exception:
            raise InvalidTypeError(identity=ancestry, field=self,
                value=value).construct('invalid')

    def _validate_value(self, value, ancestry):
        """Reject non-time values and values outside the configured bounds."""
        if not isinstance(value, time):
            raise InvalidTypeError(identity=ancestry, field=self,
                value=value).construct('invalid')
        minimum = self.minimum
        if minimum is not None and value < minimum:
            raise ValidationError(identity=ancestry, field=self, value=value).construct('minimum',
                minimum=minimum.strftime(self.pattern))
        maximum = self.maximum
        if maximum is not None and value > maximum:
            raise ValidationError(identity=ancestry, field=self, value=value).construct('maximum',
                maximum=maximum.strftime(self.pattern))
| [
"[email protected]"
] | |
5424727597a003990a354a598d2d89c6bffb0de9 | 8a8b5528b7748f101fb928538485b247a6686b88 | /unidad 1/bichos - Proyecto 1/generacionProcesos.py | c035fbd5e116daf420213b8699bd6fbe285f0ef5 | [] | no_license | DanielVldz/tbd_PedroVillaCasas | ab3fe594264f0d9f083545791c546f4dabb16619 | 1e2ea81b187b89d594d3777bac2eb919339c6a4c | refs/heads/master | 2022-03-23T13:07:42.811588 | 2019-12-12T14:45:50 | 2019-12-12T14:45:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,755 | py | import random
import names
def obtenerEmpleo():
    """Return a random (employer, phone) pair for fixture data.

    Picks one index and uses it in both parallel literal lists, so employer i
    is always paired with phone i.

    Returns:
        tuple[str, int]: (employer name, 10-digit phone number).
    """
    empleos = ["Intel", "Oracle", "Microsoft", "AMD", "VolksWagen", "Ley", "Soriana", "Wal-Mart", "Sam's Club", "Universidad autónoma de Sinaloa", "Instituto Tecnológico de Culiacán", "Farmacias Similares", "Taquería el güero", "Farmacias GI", "Delia Barraza", "Burger King", "McDonald\'s", "OXXO", "Gasolineras Pemex", "Sears", "Liverpool", "Cinépolis", "Cinemex", "Sushi Factory", "CBtis", "Conalep", "Cobaes", "Escuela Secundaria Técnica No. 50", "Escuela Secundaria Técnica No. 72", "Escuela Secundaria Técnica No. 1", "Televisa", "TV Azteca", "TV Pacífico", "Coca-Cola", "Pepsi", "Costco", "Coppel", "Electra", "Banamex", "BBVA", "Santander", "Unidad De Servicios Estatales", "Hospital General", "Hospital de la mujer", "Hospital pediátrico de Sinaloa", "Universidad Autónoma de Occidente", "Tecnológico de Monterrey"]
    telefonos = [4591301323, 6081412155, 1689752242, 1224375792, 3895669389, 5512187296, 5562160334, 3756929861, 2864794192, 3042543261, 6230567999, 4191183710, 4191111575, 8038240622, 5861787051, 3187392808, 4087365511, 1444213460, 9574972480, 3862661155, 9163193653, 8853595158, 7314945048, 7919060124, 4181657941, 7660788292, 4791657867, 2573515139, 7313346268, 9741497682, 3129122828, 3145415126, 2303365736, 1511466900, 8451343856, 1679185676, 8460198504, 2261481245, 5899083049, 4962857555, 1602912755, 8521314180, 1113011149, 1362015334, 8776770252, 7077775806, 1972610403,]
    # Robustness fix: the original bounded the index only by len(empleos),
    # which raises IndexError on telefonos[x] if the two literal lists ever
    # drift out of length sync. Bound by the shorter list instead.
    x = random.randint(0, min(len(empleos), len(telefonos)) - 1)
    return (empleos[x], telefonos[x])
def generarTutor(x):
    """Print x SQL VALUES rows for the tutor table:
    (first, last, last, employer, employer_phone, personal_phone).

    Names come from the third-party `names` package; employer/phone pairs
    from obtenerEmpleo(). Output goes to stdout for pasting into an INSERT.
    """
    for i in range(x):
        empleo = obtenerEmpleo()
        #print("('",names.get_first_name(),"',\t",names.get_last_name(),",\t",names.get_last_name(),",\t",empleo[0],",\t",empleo[1],",\t",numeroTelefonico(10),"),")
        print('(\'{}\', \'{}\', \'{}\', \'{}\', {}, {}),'.format(names.get_first_name(), names.get_last_name(), names.get_last_name(), empleo[0], empleo[1], numeroTelefonico(10)))
def generarNiño(x):
    """Print x SQL VALUES rows for the child table:
    (first, last, last, grade_level 1-6, group 'A'/'B', tutor_id, birthdate).

    tutor_id is drawn from 1..103 — presumably matching the number of rows
    generarTutor() was run with; verify against the seeded tutor table.
    """
    for i in range(x):
        nivel = random.randint(1,6)
        grado = random.randint(1,2)
        tutor = random.randint(1, 103)
        nacimiento = generarFecha()
        # Map the 1/2 draw onto the two group letters.
        if grado == 1:
            grado = 'A'
        else:
            grado = 'B'
        print("(\'{}\',\'{}\',\'{}\',{},\'{}\',{},\'{}\'),".format(names.get_first_name(), names.get_last_name(), names.get_last_name(), nivel, grado, tutor, nacimiento))
def generarFecha():
    """Return a random 2019 calendar date formatted as 'YYYYMMDD'.

    Month and day are zero-padded to two digits. December is capped at
    day 6 and February at day 28, matching the original generator.
    """
    year = 2019
    month = random.randint(1, 12)
    if month == 2:
        day = random.randint(1, 28)
    elif month == 12:
        day = random.randint(1, 6)
    elif month in (1, 3, 5, 7, 8, 10):
        day = random.randint(1, 31)
    else:
        day = random.randint(1, 30)
    return '{}{:02d}{:02d}'.format(year, month, day)
def numeroTelefonico(n):
    """Return a random integer with exactly n digits (no leading zero)."""
    low = 10 ** (n - 1)
    high = 10 ** n - 1
    return random.randint(low, high)
def getAlergia():
    """Return one allergy name, chosen uniformly at random."""
    opciones = ["Lacteos", "Mariscos", "Huevo", "Trigo", "Maní", "Almendras",
                "Nueces", "Soya", "Fresas", "Cacahuates", "Apio", "Camarones"]
    idx = random.randint(0, len(opciones) - 1)
    return opciones[idx]
def niñoAlergias(x):
    """Print x SQL VALUES rows linking a child_id (1..118) to a random allergy."""
    for i in range(x):
        print('({},\'{}\'),'.format(random.randint(1, 118), getAlergia()))
def generarAdeudo(x):
    """Print x SQL VALUES rows for the debt table: (tutor_id, amount, date)."""
    for i in range(x):
        tutor = random.randint(1, 103)
        monto = random.randint(200, 700)
        fecha = generarFecha()
        print('({},{},\'{}\'),'.format(tutor, monto, fecha))
def generarCapturas(x):
    """Print x SQL VALUES rows: (user_id, species_id, latitude, longitude).

    NOTE(review): coordinates are drawn from [-50, 180] for both axes, which
    is outside the valid latitude range (-90..90) — confirm intended schema.
    """
    for i in range(x):
        ususario = random.randint(1,25)
        especie = random.randint(1,150)
        latitud = random.randint(-50, 180)
        longitud = random.randint(-50, 180)
        print('({},{},{},{}),'.format(ususario, especie,latitud,longitud))
def randomRango(min, max):
    """Uniform random float in [min, max].

    Parameter names shadow the builtins but are kept for interface
    compatibility with existing keyword callers.
    """
    return min + random.random() * (max - min)
generarCapturas(100) | [
"[email protected]"
] | |
4fa5743d2a701a36134841c68c75c4fe672e5784 | 3d46ecb6d6d85d8d2240f1a320d2f23573e60dc7 | /tests/gcD_test.py | 843bc3d9d325cbbb2b54d5aabfb4b43c6c097147 | [] | no_license | yangcyself/BYSJ02 | 6d77a4669342086dc5b7a8140460f79d9eb8e2cb | b6486c3d8e4a190a21005ca80643cfcfd30fb724 | refs/heads/master | 2022-11-15T23:58:09.626089 | 2020-07-02T08:41:34 | 2020-07-02T08:41:34 | 254,871,090 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,366 | py | """
Test of the dynamics under the holonomic constraint of the ground contact constraints
Compare the dstate and the torque
However, this is not very accurate, But the error may be caused by the PD in the constraint of the dynamic
"""
import sys
sys.path.append(".")
import globalParameters as GP
GP.DT = 1e-3
from ctrl.WBCCtrl import *
if __name__ == "__main__":
    # Set up the controller and a target torso pose.
    CTRL.restart()
    ct = WBC_CTRL()
    # ct = QP_WBC_CTRL()
    ct.torso_des = np.array([0,1.5,0.1])
    # Force both ground-contact flags on for the whole run.
    CTRL.gc_mask.reg(ct, constant = np.array([True,True]))
    # Initial joint configuration: legs splayed by `ang`, plus random velocities.
    ang = np.pi/15
    q_init = [0, 0.8*np.cos(ang) +0.05, 0, np.pi - ang, 0, -(np.pi - ang), 0]
    ct.setState( q_init + list(np.random.random(7)))
    lastState = np.zeros(14)
    # Zero out all damping/friction so the analytic dynamics can be compared
    # cleanly against the simulator step.
    for link_idx in qind:
        p.changeDynamics(GP.robot, link_idx,
            linearDamping=0.0,
            angularDamping=0.0,
            jointDamping=0.0,
            lateralFriction = 0,
            spinningFriction = 0,
            rollingFriction = 0,
            anisotropicFriction = 0
        )
    # Pin both feet (links 5 and 8) to the floor with point-to-point
    # constraints, emulating the ground-contact holonomic constraint.
    parentFramePosition = np.array([0,0,0.1])
    parentFramePosition[GP.PRISMA_AXIS[0]] = - GP.CROSS_SIGN * 0.3
    jointAxis = [1,1,0]
    jointAxis[GP.PRISMA_AXIS[0]] = 0
    holoright = p.createConstraint(GP.floor, -1, GP.robot, 5, p.JOINT_POINT2POINT,
        jointAxis = jointAxis, parentFramePosition = list(parentFramePosition),
        childFramePosition = [0,0,0.03])
    hololeft = p.createConstraint(GP.floor, -1, GP.robot, 8, p.JOINT_POINT2POINT,
        jointAxis = jointAxis, parentFramePosition = list(parentFramePosition*np.array([-1,-1,1])),
        childFramePosition = [0,0,0.03])
    t = 0
    torque = np.zeros(7)
    # Main loop: compare the model-predicted dstate against the simulator's
    # finite-difference derivative of the state.
    while(t<5):
        # torque += (np.random.random(7)-0.5) *4
        # torque *= np.array([0,0,0, 1,1,1,1]) * 0 # because the first dimension has no motor
        ct.resetFlags()
        torque = ct.cmdFr # ct.cmdFr has the effect of setting torque
        # Fr = ct.WBC
        # print("Fr :", Fr)
        # ct.setJointTorques(torque[3:])
        print("torque :", torque)
        # Predicted acceleration from the linearized dynamics x' = Ax + Bu + g.
        dstate = ct.DA @ ct.state + ct.DB @ torque + ct.Dg
        print("dstate :", dstate[GP.QDIMS:])
        p.stepSimulation()
        # Finite-difference "ground truth" from successive simulator states.
        print("real:", ((ct.state - lastState)/dt)[GP.QDIMS:])
        lastState = ct.state
        t += dt
"[email protected]"
] | |
c77425f40fa7658865f28037d68da947fb7f1d5f | 0b23337b3c1d310712d7e8bfebef6f8237cc7ec8 | /Assignment5/problem1.py | 8e95b6f88cd15145ef956aa06a7ae0ff3e59eb07 | [] | no_license | PierreAlexisRoy/Phys512PARoy | bd5721911078c44401920fd45828e4d455060073 | 52560d7bf4071f2fbbc86a69ebddcd4f69e4cc24 | refs/heads/master | 2020-07-27T13:55:35.714245 | 2019-12-08T22:38:01 | 2019-12-08T22:38:01 | 209,114,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,678 | py | # Pierre-Alexis Roy
# 260775494
# Phys512 - Problem Set 5 - Q1
#--------------------------------------------------
import numpy as np
from matplotlib import pyplot as plt
from potential import boundCond, true_pot
from numerical import Ax
# In this problem, we have a charge cylindrical conductor, held at
# a potential V in a box with walls at potential zero.
# We use a relaxation solver to solve for the potential everywhere in the box.
#plt.ion()
# We define the size of our grid in n --> n x n grid
n=500
# we define the radius of our cylinder (circle)
radius = 50
# we use a function defined in potential.py to get the boundary conditions
bc, mask = boundCond(radius, n)
# We also compute the true analytic potential with a function
# in potential.py
trueV = true_pot(radius, n, bc, mask)
# when computing numerically the potential, the initial V is the boundary conditions
V=bc.copy()
# we will compute a tolerance to tell the algorithm when it's converged
# Recall we are solving Ax=b or b-Ax=0
# Hence, let err be this difference
# Ax is computed in numerical.py
b=-(bc[1:-1,0:-2]+bc[1:-1,2:]+bc[:-2,1:-1]+bc[2:,1:-1])/4.0
err = b - Ax(V, mask)
# define our tolerance
tol = 0.01
print('Running numerical algorithm')
# Jacobi-style relaxation: each interior point becomes the average of its
# four neighbours, then boundary values are re-imposed through the mask.
for i in range(30*n):
    V[1:-1,1:-1]=(V[1:-1,0:-2]+V[1:-1,2:]+V[:-2,1:-1]+V[2:,1:-1])/4.0
    V[mask]=bc[mask]
    # test convergence
    # NOTE(review): `err` here is from the *previous* iteration (it is only
    # refreshed below), so convergence is detected one step late — confirm
    # this lag is intentional.
    test = np.sum(err*err)
    if test <= tol:
        print('Converged after '+str(i)+' iterations')
        break
    #update the error
    err = b - Ax(V,mask)
# get the charge distribution rho as we did in class
# (discrete Laplacian of the converged potential, up to a sign/scale)
rho=V[1:-1,1:-1]-(V[1:-1,0:-2]+V[1:-1,2:]+V[:-2,1:-1]+V[2:,1:-1])/4.0
# We will plot all results together
fig, ax=plt.subplots(1,3, figsize=(15,7))
# numerical potential
ax0 = ax[0].imshow(V)
ax[0].set_title('Numerical potential in '+str(i)+' steps')
fig.colorbar(ax0, ax=ax[0], fraction=0.046, pad=0.04)
# true potential
ax1=ax[1].imshow(trueV)
ax[1].set_title('Analytic potential')
fig.colorbar(ax1, ax=ax[1], fraction=0.046, pad=0.04)
# charge distribution
ax2=ax[2].imshow(rho)
ax[2].set_title('Charge dist.')
fig.colorbar(ax2, ax=ax[2], fraction=0.046, pad=0.04)
plt.savefig('problem1_plots.pdf')
print('\nWe see the charge density is just a circle on the edge of the wire.')
print('Indeed, in a conductor, no charge should remain inside and it should go ')
print('on the edge.')
# we compute the charge per unit length
# we will sum up our rho and divide by 2PiR to get a rough estimate
charge_tot = np.sum(rho)
charge_perl = charge_tot/(2*np.pi*radius)
print('\nWe get a charge per unit length of ', charge_perl)
print('Keep in mind that this does not take epsilon0 into account.')
print('Keep in mind that this does not take epsilon0 into account.') | [
"[email protected]"
] | |
7bbfd94accf83c65ae4546356bccb460b15a900e | b8ea631aae5d132c7b0236684d5f7c12d3c222be | /Library/Graph/Dijkstra_heapq.py | 6164198b7fcd573492928ce2f82d98e051b23864 | [] | no_license | Ryushi-tech/card3 | 68c429313142e58d4722a1cd5a4acc4ab39ca41f | 883636b2f518e38343a12816c5c641b60a87c098 | refs/heads/master | 2021-07-05T22:46:33.089945 | 2020-12-12T15:31:00 | 2020-12-12T15:31:00 | 209,176,836 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 708 | py | import heapq
def dijkstra(s):
    """Single-source shortest paths from vertex s.

    Mutates the module-level ``dist`` list in place, reading the module-level
    adjacency list ``g`` of (neighbour, weight) pairs. Uses a lazy-deletion
    binary heap: stale entries are skipped when popped.
    """
    dist[s] = 0
    heap = [[0, s]]
    while heap:
        d_cur, node = heapq.heappop(heap)
        if dist[node] < d_cur:
            continue  # stale heap entry; a shorter path was already found
        for nxt, w in g[node]:
            cand = dist[node] + w
            if cand < dist[nxt]:
                dist[nxt] = cand
                heapq.heappush(heap, [cand, nxt])
# Read n vertices and n-1 weighted edges (1-indexed on input, stored
# 0-indexed) into an undirected adjacency list.
n = int(input())
g = [[] for _ in range(n)]
for _ in range(n - 1):
    a, b, c = map(int, input().split())
    a, b = a - 1, b - 1
    g[a].append((b, c))
    g[b].append((a, c))
# Sentinel "infinity" larger than any achievable path length.
inf = 10 ** 14
dist = [inf] * n
# m queries, all answered relative to source vertex k.
m, k = map(int, input().split())
k = k - 1
dijkstra(k)
# Each query prints dist[e] + dist[f]; with n-1 edges the graph is
# presumably a tree, so this is the e->k->f path length — confirm against
# the problem statement.
for _ in range(m):
    e, f = map(int, input().split())
    res = dist[e - 1] + dist[f - 1]
    print(res)
| [
"[email protected]"
] | |
48cd42cf70cd98648276cce423fd29d9850f9d0a | f2ab8ccda7203dd37d61facb9978cf74b781c7f1 | /tests/apps.py | 863cf58e139c91b4d865bed2d8a46b94a061f588 | [
"MIT"
] | permissive | Apkawa/easy-thumbnails-admin | 1991137224dcd117520b2c114d4012daf803776e | 9d7a38f215cdac53a663b00f1d4ff3a3c2a54eb4 | refs/heads/master | 2021-01-01T15:47:34.334792 | 2017-11-23T10:38:09 | 2017-11-23T10:38:09 | 97,703,157 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | try:
from django.apps import AppConfig
except ImportError:
# Early Django versions import everything in test, avoid the failure due to
# AppConfig only existing in 1.7+
AppConfig = object
class TestConfig(AppConfig):
name = 'tests'
label = 'tests'
| [
"[email protected]"
] | |
6885e4c483c0399abfd20154156beeadf8b508af | d048a865519b5f944e1430c6181d00399c979d9c | /gallery/gallery/urls.py | f765f9c8680d91d33f2171654643694a2b0f21ad | [] | no_license | jithinvijayan007/PaceWisdom- | 5f84261c4ba7f51e25c8c21074b48214a24cb6d2 | 1ba00814a757edb327923afcaf20fe04652efa0e | refs/heads/master | 2023-03-06T04:00:21.729404 | 2021-02-21T18:56:54 | 2021-02-21T18:56:54 | 340,974,786 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 835 | py | """gallery URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('',include('user.urls')),
path('',include('img_gallery.urls')),
]
| [
"[email protected]"
] | |
3e64fdee6716480ab6e525d0604b8efbd0d713d9 | 0bced66ce58044abd34fff545b3ff24dd6fc9913 | /02-python-programming/02-04-reusable-classes/tests/q4.py | 02fb8bafb7ac38d490cc4fa947d2eb1c20d1b186 | [] | no_license | prof-couch/comp205-dist | 7d7d5431a6bb60df01bbdf576560bb45516eb4c9 | 3065331db16766d0e529696416a069c2f6d61e61 | refs/heads/master | 2020-07-18T23:36:10.604657 | 2019-12-12T15:32:59 | 2019-12-12T15:32:59 | 206,334,869 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 804 | py | test = {
'name': 'Question 4',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> type(purchases) # doctest:+NORMALIZE_WHITESPACE
<class '__main__.Purchases'>
>>> type(purchases.purchases)
<class 'list'>
>>> print(purchases.purchases[0]) # doctest:+NORMALIZE_WHITESPACE
The cost of socks is 10.0
>>> print(purchases.purchases[1]) # doctest:+NORMALIZE_WHITESPACE
The cost of tie is 20.0
>>> print(purchases.purchases[2]) # doctest:+NORMALIZE_WHITESPACE
The cost of shoes is 50.0
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'
}
]
}
| [
"[email protected]"
] | |
eed58a6b703faab6b504f4b3a66b4de43ae04f0a | e75521f26a9a6fdbd0b9dbe396b14a5f3c1af305 | /src/repositories/word_classifier_repository.py | 10cf90739a261923161b283cb2b1127ab1de82cd | [] | no_license | Ap3lsin4k/words-as-part-of-speech | 2636edb87d309d44d3d18add14aadd13f7810507 | e7f35d56d65a8f5033498f650265cadbd742a9de | refs/heads/master | 2023-01-31T19:01:11.007917 | 2020-12-15T10:57:20 | 2020-12-15T10:57:20 | 320,807,979 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,258 | py | from bookmark_entity import Bookmark
from language_entity import LanguageEntity
from repositories.dictionary_surfer_common import DictionarySurferRepository
class WordClassifierRepository(DictionarySurferRepository):
    """Classifies an input word by scanning the dictionary for every
    grammatical property (per category) whose word list contains it.

    The outcome is accumulated in ``self.result`` as
    ``{part_of_speech: {category_name: property_name}}``, or ``None`` when
    nothing matched.
    """

    def __init__(self, dictionary_entity: LanguageEntity):
        super().__init__(dictionary_entity)

    def make_response_model(self, part_of_speech, input_word):
        """Populate self.result for one part of speech, or set it to None."""
        self.result = {part_of_speech: {}}
        for category_of_property, properties in self.dictionary[part_of_speech].items():
            bookmark = Bookmark(part_of_speech, category_of_property)
            self.__classify_word_by_property(bookmark, input_word)
        # No category matched the word: signal "not classifiable" with None.
        if len(self.result[part_of_speech]) == 0:
            self.result = None

    def __save_property_of_word_to_presentable_format(self, bookmark):
        # Record the matched property under its category.
        # NOTE(review): a later match in the same category overwrites an
        # earlier one — confirm the dictionary guarantees at most one match.
        self.result[bookmark.get_part_of_speech()].update({bookmark.category_name: bookmark.property_name})

    def __classify_word_by_property(self, bookmark, input_word):
        """Try every property of the bookmark's category against input_word."""
        # The loop variable is deliberately the bookmark's own attribute, so
        # the bookmark always carries the property currently being tested.
        for bookmark.property_name in self.dictionary.get_properties(bookmark):
            words_tuple = self.dictionary.get_words_for_property(bookmark)
            if input_word in words_tuple:
                self.__save_property_of_word_to_presentable_format(bookmark)
"[email protected]"
] | |
dbdefdabc3655301bdbe56e2f119a6047094282f | c0ea00e5744464c25e608c0274735424661aabf0 | /V3_TAD_CALLER_NOR_RAW/with_CTCF/Simple_Compare_CTCF.py | e62d742b8bbefee87b54a65094f5b3623ceff818 | [] | no_license | yang714/TAD_Thesis | 8441578454d1416c30a90e1981ec38292fded0c7 | b0d5a018557a3444ac291c80424543e58a98fa35 | refs/heads/master | 2020-07-15T07:23:20.180843 | 2019-08-31T08:13:21 | 2019-08-31T08:13:21 | 205,511,679 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 144 | py | from keras.models import load_model
import matplotlib.pyplot as plt
from keras.utils import np_utils
import numpy as np
import pandas as pd
| [
"[email protected]"
] | |
6d951fcf28c05693f6c5c9e9643b5d7a6ac3a2ca | 3c6ca5ce1f6a5bc120d05f31ba643083cccc9e70 | /Origani/asgi.py | 3552392274b3f7ed16c9e86f4dd46cdd15fdc357 | [] | no_license | Mounir-Bennacer/ecommerce | efa3ca9482a5cbd36bd9f4ef787b6c79fc040160 | 2041a20d5932980b5818e12c059ae15ff65837a9 | refs/heads/master | 2022-12-10T12:54:45.582358 | 2020-08-30T17:39:47 | 2020-08-30T17:39:47 | 291,323,794 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | """
ASGI config for Origani project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Origani.settings')
application = get_asgi_application()
| [
"[email protected]"
] | |
826e3ace59be293f7511e153eab6277f15af6600 | 11338e8dd7cd5d13ae263fb8a3ec4226d5fe05bb | /demo/medusa/auth_handler.py | 40b5737a026c6df658b29499398106bdf7573477 | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-mit-old-style",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | balabit-deps/balabit-os-6-m2crypto | 35156453bab88fbfa23a1910483973fc33abaab1 | 10194787e9e541f9970aab739bbe2beae8c1bc2e | refs/heads/master | 2021-03-16T07:54:51.324933 | 2015-12-12T03:31:33 | 2016-10-27T16:27:28 | 124,134,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,537 | py | # -*- Mode: Python; tab-width: 4 -*-
#
# Author: Sam Rushing <[email protected]>
# Copyright 1996-2000 by Sam Rushing
# All Rights Reserved.
#
# support for 'basic' authenticaion.
import base64
import hashlib
import re
import string
import time
import counter
import default_handler
get_header = default_handler.get_header
import http_server
import producers
# This is a 'handler' that wraps an authorization method
# around access to the resources normally served up by
# another handler.
# does anyone support digest authentication? (rfc2069)
class auth_handler:
def __init__(self, dict, handler, realm='default'):
self.authorizer = dictionary_authorizer(dict)
self.handler = handler
self.realm = realm
self.pass_count = counter.counter()
self.fail_count = counter.counter()
def match(self, request):
# by default, use the given handler's matcher
return self.handler.match(request)
def handle_request(self, request):
# authorize a request before handling it...
scheme = get_header(AUTHORIZATION, request.header)
if scheme:
scheme = string.lower(scheme)
if scheme == 'basic':
cookie = AUTHORIZATION.group(2)
try:
decoded = base64.decodestring(cookie)
except:
print 'malformed authorization info <%s>' % cookie
request.error(400)
return
auth_info = string.split(decoded, ':')
if self.authorizer.authorize(auth_info):
self.pass_count.increment()
request.auth_info = auth_info
self.handler.handle_request(request)
else:
self.handle_unauthorized(request)
#elif scheme == 'digest':
# print 'digest: ',AUTHORIZATION.group(2)
else:
print 'unknown/unsupported auth method: %s' % scheme
self.handle_unauthorized()
else:
# list both? prefer one or the other?
# you could also use a 'nonce' here. [see below]
#auth = 'Basic realm="%s" Digest realm="%s"' %
# (self.realm, self.realm)
#nonce = self.make_nonce (request)
#auth = 'Digest realm="%s" nonce="%s"' % (self.realm, nonce)
#request['WWW-Authenticate'] = auth
#print 'sending header: %s' % request['WWW-Authenticate']
self.handle_unauthorized(request)
def handle_unauthorized(self, request):
# We are now going to receive data that we want to ignore.
# to ignore the file data we're not interested in.
self.fail_count.increment()
request.channel.set_terminator(None)
request['Connection'] = 'close'
request['WWW-Authenticate'] = 'Basic realm="%s"' % self.realm
request.error(401)
def make_nonce(self, request):
"A digest-authentication <nonce>, constructed as suggested in RFC 2069"
ip = request.channel.server.ip
now = str(long(time.time()))[:-1]
private_key = str(id(self))
nonce = string.join([ip, now, private_key], ':')
return self.apply_hash(nonce)
def apply_hash(self, s):
"Apply MD5 to a string <s>, then wrap it in base64 encoding."
m = hashlib.new()
m.update(s)
d = m.digest()
# base64.encodestring tacks on an extra linefeed.
return base64.encodestring(d)[:-1]
def status(self):
# Thanks to [email protected] (Mike Meyer)
r = [
producers.simple_producer(
'<li>Authorization Extension : '
'<b>Unauthorized requests:</b> %s<ul>' % self.fail_count
)
]
if hasattr(self.handler, 'status'):
r.append(self.handler.status())
r.append(
producers.simple_producer('</ul>')
)
return producers.composite_producer(
http_server.fifo(r)
)
class dictionary_authorizer:
    """Credential checker backed by a plain {username: password} mapping."""

    def __init__(self, dict):
        # The parameter name `dict` (shadowing the builtin) is kept for
        # interface compatibility with existing callers.
        self.dict = dict

    def authorize(self, auth_info):
        """Return 1 when auth_info == [user, password] matches the table, else 0."""
        username, password = auth_info
        return 1 if username in self.dict and self.dict[username] == password else 0
AUTHORIZATION = re.compile(
# scheme challenge
'Authorization: ([^ ]+) (.*)',
re.IGNORECASE
)
| [
"[email protected]"
] | |
c7e2d80388cbe425136e01a06bdb2ea24fa604c6 | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/sdssj9-10_163557.64+341427.0/sdB_sdssj9-10_163557.64+341427.0_coadd.py | 39e21f206956741881cd664d37e0bb5ecdba667f | [] | no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py | from gPhoton.gMap import gMap
def main():
    """Run gPhoton gMap for one sdB target: NUV band, fixed sky position,
    30 s time steps, writing a per-frame count movie and a coadded count
    image to hard-coded output paths (overwriting any existing files)."""
    gMap(band="NUV", skypos=[248.990167,34.240833], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_sdssj9-10_163557.64+341427.0/sdB_sdssj9-10_163557.64+341427.0_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_sdssj9-10_163557.64+341427.0/sdB_sdssj9-10_163557.64+341427.0_count_coadd.fits", overwrite=True, verbose=3)

if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
8c3d403ab50c13348610501913946d6eef005ba4 | b7ff0463dc6d69a3d9d3ad12321741c182317551 | /microsoft.py | bb8fc7c59c4921a252b919a8a9ecfcc5d3f09e11 | [] | no_license | ranahaani/msstream-dl | 506408b972b704e89cbb0280723a04818f74f369 | c051e654675134c9a76668e7dc859273c9d59334 | refs/heads/master | 2023-01-06T17:04:20.979135 | 2020-10-27T12:28:03 | 2020-10-27T12:28:03 | 303,945,063 | 9 | 4 | null | 2020-11-17T09:24:26 | 2020-10-14T08:04:56 | JavaScript | UTF-8 | Python | false | false | 1,475 | py | import os
import sys
import getopt
from scripts.handler import Downloader
from pathlib import Path, PurePath
def main():
    """Parse --username/--video options, clear stale 'mscookies' files from
    the user's Downloads folder, and start the MS Stream Downloader.

    Exits with status 1 on KeyboardInterrupt or any Downloader error.
    """
    options = getopt.getopt(sys.argv[1:], '', ['username=', 'video='])
    username_arg = None
    video_arg = None
    errs = False
    for opt, arg in options[0]:
        # BUG FIX: the original used `opt in '--username'` / `opt in '--video'`,
        # which is a substring test on the option string, not an equality
        # check. It happened to work for getopt's exact outputs but silently
        # matches any substring; compare explicitly instead.
        if opt == '--username':
            username_arg = arg
        if opt == '--video':
            video_arg = arg
    if username_arg is None:
        print(
            '--username parameter is missing, pass your MS Stream account username with '
            '[email protected]\n')
        errs = True
    if video_arg is None:
        print('--video parameter is missing, pass the video link with --video=link\n')
        errs = True
    try:
        if not errs:
            # Remove leftover cookie files from previous runs so the
            # Downloader starts from a clean session.
            cookie_path = str(PurePath(Path.home(), "Downloads"))
            cookies_files = [os.path.join(cookie_path, i) for i in os.listdir(cookie_path) if
                             os.path.isfile(os.path.join(cookie_path, i)) and 'mscookies' in i]
            for filename in cookies_files:
                try:
                    os.remove(filename)
                except OSError:
                    pass  # best-effort cleanup; a locked file is not fatal
            Downloader(username_arg, video_arg)
    except KeyboardInterrupt:
        print("\nInterrupted by user. Aborting!\n")
        sys.exit(1)
    except Exception as ex:
        print("\nError: " + str(ex) + "\n")
        sys.exit(1)

if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
2fb88f1edc531e4764e6982e5c963b89eb59aaf7 | acc04607c4f4f4b0b044b1bea521ca92e0930041 | /bin/show_config.py | 103195121414b190b04b745bc91c07aa48c88128 | [] | no_license | TangoJP/npctransport | c8c6587cd8ee16b2a9f6fd87e98cf1a6a8f8dba0 | 40bc9e2e0b4dc1df72b6d22000269d494d8f4f69 | refs/heads/master | 2020-03-27T22:08:00.779328 | 2013-04-24T00:01:19 | 2013-04-24T00:01:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 156 | py | #!/usr/bin/python
from IMP.npctransport import *
import sys
f=open(sys.argv[1], "rb")
config= Configuration()
config.ParseFromString(f.read())
print config
| [
"[email protected]"
] | |
64bc916865d2f533eb5dc449e459d972c3969181 | 4f6ba562b491fbb49194b20b707e25005e1640de | /AMTTA.py | cf6bff4e58eb9aa1c41df2aecf37715f1a14c1e4 | [] | no_license | royson/AMT | 3a38f1eb09100b485125c3fef53933ef550d04d4 | 5ef03d78a657177f5281cf76391b05128fe4fb92 | refs/heads/master | 2021-08-30T18:27:39.895011 | 2017-12-19T01:05:49 | 2017-12-19T01:05:49 | 114,703,130 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,997 | py | # AMT's Traffic Analysis Only.
# Royson Lee - 25 Jun 2017
#! /usr/bin/env python2.7
from scapy.all import *
try:
from scapy.layers.ssl_tls import *
except ImportError:
from scapy_ssl_tls.ssl_tls import *
from netfilterqueue import NetfilterQueue
import sys
import struct
import ssl
import time
datasize = {}
last_timing = {}
timing = {}
REVERSE_HTTPS_PACKET_THRESHOLD = 90
REVERSE_TCP_PACKET_THRESHOLD = 78
REVERSE_HTTPS_SIZE_UBOUND = 2700000
REVERSE_HTTPS_SIZE_LBOUND = 1200000
REVERSE_HTTPS_TIME_UBOUND = 0.1
REVERSE_HTTPS_TIME_LBOUND = 0.001
REVERSE_TCP_SIZE_UBOUND = 2700000
REVERSE_TCP_SIZE_LBOUND = 1000000
REVERSE_TCP_TIME_UBOUND = 0.3
REVERSE_TCP_TIME_LBOUND = 0.01
# For Evaluation
pktdump = PcapWriter("test.pcap",append=True, sync=True)
def check_meterpreter_https_signature(sizes, timings):
    """Heuristic detector for a Meterpreter reverse_https session.

    Fires when the TLS record-size sequence starts near 256, contains a
    16448-byte record followed later by a 176-byte record, and both the
    payload bytes between those markers and the mean inter-record timing
    fall inside the module-level REVERSE_HTTPS_* bounds.

    Returns True on a match, False otherwise (including on malformed input).
    """
    # Check if sizes start with 256 contains 16448 and contains
    # 176 after
    try:
        # len(sizes) - sizes[::-1].index(176) - 1 is the index of the LAST
        # 176; the 16448 marker must occur before it.
        if 256 in sizes[0:3] \
        and sizes.index(16448) < (len(sizes) - sizes[::-1].index(176) - 1):
            return \
            REVERSE_HTTPS_SIZE_LBOUND < total_bytes(sizes, 16448, 176) < REVERSE_HTTPS_SIZE_UBOUND \
            and \
            REVERSE_HTTPS_TIME_LBOUND < mean_timing(timings) < REVERSE_HTTPS_TIME_UBOUND
    except (ValueError, IndexError) as e:
        # Marker not present / sequence too short: treat as no match.
        return False
    return False
def check_meterpreter_tcp_signature(sizes, timings):
    """Heuristic detector for a Meterpreter reverse_tcp session.

    Same structure as the HTTPS check but keyed on the 144-byte marker and
    the REVERSE_TCP_* bounds: 144 near the start, a 16448-byte record before
    the last 144, and byte-volume/timing averages inside the bounds.
    """
    try:
        if 144 in sizes[0:3] \
        and sizes.index(16448) < (len(sizes) - sizes[::-1].index(144) - 1):
            return \
            REVERSE_TCP_SIZE_LBOUND < total_bytes(sizes, 16448, 144) < REVERSE_TCP_SIZE_UBOUND \
            and \
            REVERSE_TCP_TIME_LBOUND < mean_timing(timings) < REVERSE_TCP_TIME_UBOUND
    except (ValueError, IndexError) as e:
        # Marker not present / sequence too short: treat as no match.
        return False
    return False
def total_bytes(sizes, first, second):
    """Sum the sizes strictly between the first occurrence of `first` and
    the next occurrence of `second` after it.

    Returns 0 when either marker is missing. The running total is also
    printed, preserving the original's trace output.
    """
    try:
        start = sizes.index(first)
        span = sizes[start:].index(second)
    except (ValueError, IndexError):
        return 0
    between = sum(sizes[start + 1:start + span])
    print(between)
    return between
def signature_after(sizes, first, signature):
    """Return True iff `signature` appears in `sizes`, starting at the first
    occurrence of signature[0] at or after the first occurrence of `first`.

    False when either marker is missing or the tail is too short.
    """
    try:
        anchor = sizes.index(first)
        offset = sizes[anchor:].index(signature[0])
    except (ValueError, IndexError):
        return False
    start = anchor + offset
    return sizes[start:start + len(signature)] == signature
def mean_timing(timings):
    """Return the arithmetic mean of `timings` (also printed for tracing)."""
    average = sum(timings) / len(timings)
    print(average)
    return average
def retrieve_key(pkt):
    """Flow key for a packet: srcIP + srcPort + dstIP + dstPort, concatenated
    as one string (direction-sensitive)."""
    tcp = pkt[TCP]
    return "{}{}{}{}".format(pkt.src, tcp.sport, pkt.dst, tcp.dport)
def alert(pkt, msg):
    """Print a detection notice (`msg`) with the flow's source/destination."""
    print("[*] Possible " + msg + " Detected")
    print("[*] Source: " + str(pkt.src) + ":" + str(pkt[TCP].sport))
    print("[*] Destination: " + str(pkt.dst) + ":" + str(pkt[TCP].dport))
def analyse_pkt(data):
    """NetfilterQueue callback: accumulate per-flow TLS record sizes and
    inter-record timings, and run the Meterpreter detectors once a flow
    reaches the per-signature packet threshold. Always accepts the packet.
    """
    pkt = IP(data.get_payload())
    # A new TLS ServerHello marks a fresh session: reset this flow's state.
    if pkt.haslayer('SSL') and pkt.haslayer('TLSServerHello'):
        #Wipe information
        key = retrieve_key(pkt)
        last_timing[key] = None
        timing[key] = []
        datasize[key] = []
    # Application-data records only: skip handshake and ChangeCipherSpec.
    if pkt.haslayer('SSL') and not pkt.haslayer('TLSHandshake') \
    and not pkt.haslayer('TLSChangeCipherSpec'):
        records = pkt[SSL].records
        key = retrieve_key(pkt)
        new_datasize = 0
        for r in records:
            if r.haslayer('TLSRecord'):
                new_datasize = new_datasize + r[TLSRecord].length
        if new_datasize:
            #Get timing of key
            # First record of a flow gets timing 0; later records record the
            # gap since the previous one.
            curr_time = time.time()
            if not last_timing.setdefault(key, None):
                timing.setdefault(key,[]).append(0)
            else:
                prev_time = last_timing[key]
                timing[key].append(curr_time - prev_time)
            last_timing[key] = curr_time
            datasize.setdefault(key, []).append(new_datasize)
            # For Evaluation: also archive the packet to test.pcap.
            pktdump.write(pkt)
            # print(key + " : " + str(len(timing[key])))
            # Extracting up to threshold and then evaluate
            if len(timing[key]) == REVERSE_TCP_PACKET_THRESHOLD:
                #print(timing[key])
                #print(datasize[key])
                if check_meterpreter_tcp_signature(datasize[key],timing[key]):
                    alert(pkt, "reverse_tcp Meterpreter Session")
            if len(timing[key]) == REVERSE_HTTPS_PACKET_THRESHOLD:
                #print(timing[key])
                #print(datasize[key])
                if check_meterpreter_https_signature(datasize[key],timing[key]):
                    alert(pkt, "reverse_https Meterpreter Session")
    # Never drop traffic; this tool only observes.
    data.accept()

# Bind this analyser to netfilter queue 1 and run until interrupted.
nfqueue = NetfilterQueue()
nfqueue.bind(1, analyse_pkt) # 1 is the queue number
try:
    print("Awaiting data")
    nfqueue.run()
except KeyboardInterrupt:
    pass
"[email protected]"
] | |
25d047f5bab2356e2de9bbf4f65e65e5247371f2 | dd4bb65cf9f1f0b2820e95b3780080f149f4bf8d | /tmp/available_task_pc_match.py | fc5541576e8fc8ddeef64aac8fabe49d7211b426 | [] | no_license | sa615153/form_data_test_css | 184569e9aa76e51fd12b44f18f80ad1ea48a94c6 | f8fee0b54839c8f0b7a9fd6a5e84db21e36d8b14 | refs/heads/master | 2021-01-12T12:42:47.249558 | 2017-01-20T10:09:33 | 2017-01-20T10:09:33 | 69,649,983 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,402 | py | # -*- coding: utf-8 -*-
from database import Session
from flask_restful import reqparse
from flask_restful import abort
from flask_restful import Resource
from flask_restful import fields
from flask_restful import marshal_with
from qa_api.models import MajorTask
from qa_api.models import Machine
from qa_api.models import SubTask
from qa_api.models import SubtaskProperty
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy import or_
import time
# Shared request parser for this module's resources: optional string
# arguments describing a task, its tracking number, and its status.
parser = reqparse.RequestParser()
parser.add_argument('task', type=str)
parser.add_argument('tracknumber', type=str)
parser.add_argument('status', type=str)
class AvailableTaskPCMatch(Resource):
    """REST resource that pairs an available subtask with an idle machine.

    GET locks the machine table (SELECT ... FOR UPDATE via with_lockmode),
    picks a (subtask, machine) match, commits to release the lock, and
    returns the pair (or Nones) to the dispatcher.

    NOTE(review): this code mixes Python 2 semantics (len() on filter()
    results, bare `print`) with Python 3 print() calls — verify the target
    interpreter.
    """
    # NOTE(review): defined without `self`/`@staticmethod`; it is referenced
    # bare inside find_match() below, where the class scope is NOT visible,
    # so that reference raises NameError at runtime — confirm intent.
    def sub_is_todo(x):
        if x.status == 0:
            return True
        else:
            return False
    def get(self):
        """Return one {"task name:", "subtask_type:", "machine:"} match."""
        # Create a dedicated session, used for mutual exclusion, kept alive
        # for the whole of get().
        session = Session()
        # List of initial (task, machine) pairs to hand back to the dispatcher.
        return_list = []
        ############################################
        ###lock using table machine
        ############################################
        # find idle machines
        idle_machine_list = session.query(Machine).with_lockmode('update').filter(Machine.status == 0).all()
        # find conclusion report type subtasks in subtask table
        conclusion_report_list = session.query(SubTask).filter(SubTask.name == 'report').all()
        # filter to figure out all windows machines to do report in idle machine list
        available_doreport_machine_list = filter(lambda x: True if x.label == 'windows' else False, idle_machine_list)
        # assign reports to idle windows machines
        # NOTE(review): range(0, len(...) - 1) skips the last report — off by one?
        for ival in range(0, len(conclusion_report_list) - 1):
            if ival < len(available_doreport_machine_list):
                return_list.append((conclusion_report_list[ival], available_doreport_machine_list[ival]))
                # remove target machine cos it has been assigned to do report
                idle_machine_list.remove(available_doreport_machine_list[ival])
        # end of report subtask assginment
        ####################################################################
        # process to assign normal subtasks by priority or by precondition #
        ####################################################################
        # get test2 to_do task
        todo_Test2_list = session.query(MajorTask). \
            filter((MajorTask.is_test2 == 1)). \
            filter(or_(MajorTask.status == 0, MajorTask.status == 1)). \
            all()
        for task in todo_Test2_list:
            # gather to_do subtasks
            todo_subtask_list = filter(lambda x: True if x.status == 0 else False, task.subtasks)
            for subtask in todo_subtask_list:
                # ignore report tasks for they have been assigned
                if subtask.name == "report":
                    continue
                # normal ones, check precondition
                pre = subtask.property.precondition
                if pre == "no": # no precondition
                    pass # assign this subtask
                else:
                    '''check the precondition subtask and if its done'''
                    prelist = filter(lambda x: True if x.name == pre else False, task.subtasks)
                    if prelist[0].status == 0 or prelist[0].status == 1: # precondition not finished yet
                        continue # do not assign this subtask
                    elif prelist[0].status == 2 and prelist[0].result == 'failed':
                        continue
                    else:
                        # NOTE(review): available_machine_list is computed but
                        # never used — the assignment step looks unfinished.
                        available_machine_list = filter(lambda x: True if x.label == subtask.property.label else False,
                                                        idle_machine_list)
                        pass # assign this subtask
        # get test1&2 to_do task
        # NOTE(review): the next two queries repeat the test2 filter and
        # rebind the same name — presumably placeholders for test1&2 / test1.
        todo_Test2_list = session.query(MajorTask). \
            filter((MajorTask.is_test2 == 1)). \
            filter(or_(MajorTask.status == 0, MajorTask.status == 1)). \
            all()
        # get test1 to_do task
        todo_Test2_list = session.query(MajorTask). \
            filter((MajorTask.is_test2 == 1)). \
            filter(or_(MajorTask.status == 0, MajorTask.status == 1)). \
            all()
        # list of idle Windows machines
        idle_quality_machines = session.query(Machine).with_lockmode('update').filter(Machine.status == 0,
                                                                                      Machine.label == 'windows').all()
        # list of idle Linux machines
        idle_linux_compile_machines = session.query(Machine).with_lockmode('update').filter(Machine.status == 0,
                                                                                            Machine.label == 'linux').all()
        # machine state, grouped by label
        machine_dict = {'windows': idle_quality_machines, 'linux': idle_linux_compile_machines}
        # open tasks (todo or doing)
        idle_task_list = session.query(MajorTask).filter(
            or_(MajorTask.status == 0, MajorTask.status == 1)).all()
        print("type of idle_task_list %s" % type(idle_task_list))
        def find_match(machine_dict):
            # Return the first (subtask, machine) pair whose labels agree,
            # or 0 when nothing can be matched.
            for major_task in idle_task_list:
                for subtask in filter(sub_is_todo, major_task.subtasks):
                    subtask_machine_label = session.query(SubtaskProperty).filter(
                        SubtaskProperty.subtask_name == subtask.name).all()
                    print("subtask_machine_label:%s" % type(subtask_machine_label[0]))
                    subtask_property = subtask_machine_label[0]
                    # print("KLKLK:%s" %temp.label)
                    if len(machine_dict[subtask_property.label]) == 0: # this label no machine
                        continue
                    else:
                        target_machine = machine_dict[subtask_property.label].pop() # get the target machine
                        print("target::::%s" % target_machine)
                        return (subtask, target_machine)
            return 0
        find_match_result = find_match(machine_dict)
        if find_match_result != 0: # get available task machine match success
            '''
            # change the database
            # change the subtask.status from 0 to 1(todo to doing)
            # set subtask.machine_name with the target_machine.hostname
            # change the target_machine.machine_status from 0 to 1(idle to busy)
            # change MajorTask.status,,,before 0 now 1(todo to doing),,,before 1 now 1 or 2(doing to doing or done)
            '''
            # find_match_result[0].subtask_status = 1
            # find_match_result[0].machine_name = find_match_result[1].hostname
            # find_match_result[1].machine_status = 1
            #
            # cur_major = find_match_result[0].MajorTask
            #
            # if cur_major.task_status == 0:#before the Majortask is todo,change it to doing
            #     cur_major.task_status = 1
            #
            # elif cur_major.task_status == 1:#before the Majortask is doing, it is doing
            #     cur_major.task_status = 1#do nothing password
            ############################################
            ###unlock using table machine
            ############################################
            # time.sleep(10)
            session.commit()
            subtask_list = find_match_result[0]
            machine_list = find_match_result[1]
            print("find***:%s" % find_match_result[0])
            print
            return {"task name:": subtask_list.major_task_track_number,
                    "subtask_type:": subtask_list.name,
                    "machine:": machine_list.IP}
        else: # find_match_result == 0
            ############################################
            ###unlock using table machine
            ############################################
            session.commit()
            return {"task name:": None, "subtask_type:": None,
                    "machine:": None}
    def modify_machine_status(self, session, status, machine_list):
        # Placeholder — not implemented yet.
        pass
    def modify_subtask_table(self, session, status, subtask_list):
        # Placeholder — not implemented yet.
        pass
| [
"[email protected]"
] | |
def stripe_create_payout(vendor, amount, currency):
    """Request a Stripe payout for a connected account (the seller).

    Converts *amount* to Stripe units, creates the payout in the currency of
    the vendor's first bank account, records a Payout plus a Transaction row
    atomically, and returns the raw Stripe response.  When the vendor has no
    approved bank account, returns a {"status": "failed", ...} dict instead.

    Note: the *currency* parameter is kept for interface compatibility but is
    always overwritten from the vendor's bank account.
    """
    Transaction = apps.get_model("marketplace", "Transaction")
    Payout = apps.get_model("marketplace", "Payout")
    stripe_amount = stripe_convert_application_to_stripe_amount(amount)
    fee = stripe_convert_stripe_to_application_fee(stripe_amount)
    # Hoisted: the original fetched the bank account twice in a row.
    bank_account = stripe_retrieve_first_bank_account(vendor.stripe_id)
    currency = bank_account.get("currency") if bank_account else None
    if not currency:
        return {
            "status": "failed",
            "message": "Vendor first needs to add approved bank account",
        }
    response = stripe.Payout.create(
        amount=stripe_amount, currency=currency, stripe_account=vendor.stripe_id
    )
    with atomic_transaction.atomic():
        payout = Payout.objects.create(
            currency=currency,
            stripe_id=response.get("id"),
            method="stripe_payout",
            fee=fee,
            amount=amount,
            # FIX: the original tested `is not (None or "null")`, which
            # collapses to an identity comparison against the string "null",
            # so payouts with a real failure_code were still marked OMW.
            # A payout is "on its way" only when no failure code is present.
            status=Payout.OMW
            if response.get("failure_code") in (None, "null")
            else Payout.FAILED,
            data=response,
        )
        Transaction.objects.create(vendor=vendor, payout=payout, type="payout")
    return response
def stripe_update_bank_account(
    vendor_id,
    country,
    currency,
    account_number,
    bank_account_obj="bank_account",
    routing_number=None,
):
    """Replace the connected account's first external bank account on Stripe.

    Returns a (success, message) tuple: (True, "Success") when Stripe reports
    the new account with status "new", (False, ...) otherwise.
    """
    account = stripe_retrieve_account(vendor_id)
    if account and account.get("external_accounts"):
        result = stripe.Account.modify_external_account(
            id=vendor_id,
            external_account={
                "object": bank_account_obj,
                "country": country,
                "currency": currency,
                "account_number": account_number,
                "routing_number": routing_number,
            },
        )
        # Stripe marks a freshly attached bank account with status "new".
        if result and result.get("status") == "new":
            return (True, "Success")
    return (False, "Something went wrong it looks like you dont have a bank account")
| [
"[email protected]"
] | |
f82a7850addf3773f1ce92a89e4d51f96cf3f763 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_2_neat/16_0_2_tkdkop_pancake.py | 259ec04a68548d92ceed7f438162fc6b46baa760 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 286 | py | #!/usr/bin/env python
import sys
import itertools
# Google Code Jam "Revenge of the Pancakes"-style solver (Python 2).
# The first stdin line is the number of test cases; it is read only to
# consume it (the loop just walks the remaining lines).
m = sys.stdin.readline()
i = 0
for line in sys.stdin.readlines():
    line = line.strip()
    i += 1
    out_str = "Case #%d: " % i
    # Append the target face ('+') so the final run is counted correctly;
    # the answer is then the number of character-run boundaries, i.e. the
    # number of groups minus one.
    line += '+'
    k = itertools.groupby(line)
    out_str += str(len(list(k))-1)
    print out_str
| [
"[[email protected]]"
] | |
32c66a3d65f250e18cdd2c4b1e3227858be2d60a | 093ae3137ffdb8b7f2f4d0687208515cfc0512b6 | /month02/day15/exercise02.py | 10cb6283cd153fcb503e608b72c0e877bddf84e3 | [
"Apache-2.0"
] | permissive | Dante9527-A/study | afde3f85ad8f0b3a974d1b2e82c8b431ac62f847 | f23f098c708bad42e29b601a57a93309901db3a9 | refs/heads/main | 2023-02-27T19:02:55.390962 | 2021-02-01T10:38:08 | 2021-02-01T10:38:08 | 334,914,137 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 674 | py | from socket import *
from threading import Thread
import sys
HOST = "0.0.0.0"    # listen on all interfaces
PORT = 8888
ADDR = (HOST,PORT)  # bind address for the TCP server below
def handle(connfd):
    """Print everything received on *connfd* until the peer closes it."""
    # recv() returns b'' once the client disconnects, which ends the loop.
    for chunk in iter(lambda: connfd.recv(1024), b''):
        print(chunk.decode())
    connfd.close()
def main():
    """Accept TCP clients forever, serving each in a daemon thread."""
    tcp_socket = socket()
    tcp_socket.bind(ADDR)
    tcp_socket.listen(5)
    while True:
        try:
            connfd,addr = tcp_socket.accept()
            print("Connect from",addr)
        # NOTE(review): bare except also catches KeyboardInterrupt — that is
        # how the server is meant to be stopped, but it hides other errors.
        except:
            tcp_socket.close()
            sys.exit("服务结束")
        # daemon=True: worker threads die together with the main thread.
        t = Thread(target=handle,args=(connfd,),daemon=True)
        t.start()
if __name__ == '__main__':
    main()
"[email protected]"
] | |
39eb7173cdd5f6dbd364ee7286d195fa1117b8d4 | 8431b28994b082c1be46acf5f9bd3ed0bf9f5d1c | /init.py | 1deb9dcf4b02e770425e533ffde994713cce27f9 | [] | no_license | rochSmets/pywi | ac26fcaf0140de4f72cea4819e3a1d2c41ee3fad | 144589b071de170d42963c657c2c67211fa0d312 | refs/heads/master | 2021-03-29T10:53:53.734344 | 2020-09-08T12:18:53 | 2020-09-08T12:18:53 | 247,947,655 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,130 | py | #!/opt/local/bin/python
# encoding: utf-8
import numpy as np
import matplotlib.pyplot as plt
class CondInit(object):
    """Base class for 1-D/2-D plasma initial conditions.

    Subclasses override Te/Ti/B/density and are expected to set self.c
    (speed of light, normalized) and self.memi (electron/ion mass ratio)
    in their __init__ — the base class does not define them.
    """
    def __init__(self, domsize,ncells):
        # NOTE(review): self.L duplicates self.domsize (set again in resize).
        self.L = domsize
        self.ncells = ncells
        self.resize(domsize,ncells)
    def resize(self,domsize,ncells):
        """Recompute mesh spacing and cell coordinates for the given grid."""
        # Mesh step; the "-2" suggests two ghost cells per direction — confirm.
        dl0 = domsize[0]/float(ncells[0]-2.)
        dl1 = domsize[1]/float(ncells[1]-2.)
        self.dl = (dl0,dl1)
        self.domsize = domsize
        self.ncells = ncells
        # Cell coordinates, starting one cell before the physical domain.
        self.x0 = -self.dl[0] + np.arange(ncells[0])*self.dl[0]
        self.x1 = -self.dl[1] + np.arange(ncells[1])*self.dl[1]
    def debye(self):
        # Debye length profile in normalized units.
        return np.sqrt(self.Te()/self.density())/self.c
    def wpe(self):
        # Inverse electron plasma frequency scale (uses the mass ratio).
        return np.sqrt(1./self.memi)/self.c
    def Te(self):
        # Electron temperature profile; flat by default, overridden below.
        return np.ones(self.ncells[1])
    def Ti(self):
        # Ion temperature profile; flat by default.
        return np.ones(self.ncells[1])
    def B(self):
        # Magnetic field profile; flat by default.
        return np.ones(self.ncells[1])
    def density(self):
        # Density profile; flat by default.
        return np.ones(self.ncells[1])
    def dlmax(self):
        # Recommended maximum mesh size: 1.5 x smallest Debye length.
        return 1.5*np.min(self.debye())
    def cfl_light(self):
        # Light-wave CFL time-step limit for the current mesh.
        return 1./self.c *1./np.sqrt(1./self.dl[0]**2 + 1./self.dl[1]**2)
    def __str__(self):
        """Human-readable sanity report on mesh size and time-step limits."""
        st = 'INITIALISATION\n'
        st += '--------------\n'
        if min(self.dl) < self.dlmax():
            st += 'mesh size (%f,%f) can be increased\n' % self.dl
        if min(self.dl) > self.dlmax():
            st += 'WARNING : mesh size (%f,%f) is larger than the recommended one\n' % self.dl
        st += 'Maximum Mesh Size : 1.5*min(Debye) = 1.5*%f = %f\n' % (np.min(self.debye()),self.dlmax())
        st += 'Plasma time step should be smaller than 0.1min(wpe^-1) = %f\n' % (0.1*np.min(self.wpe()))
        st += 'Field time step should be smaller than CFL_Light = %f, recommended 0.3CFL = %f\n' % (self.cfl_light(), 0.3*self.cfl_light())
        return st
    def plot(self):
        """Save a plot of the B, density, Ti and Te profiles to init.eps."""
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.plot(self.x1, self.B())
        ax.plot(self.x1, self.density())
        ax.plot(self.x1, self.Ti())
        ax.plot(self.x1, self.Te())
        ax.set_ylim( (-2., 2.) )
        fig.savefig('init.eps')
class Thermal(CondInit):
    """Spatially uniform ("thermal") plasma: flat density, field and Te."""

    def __init__(self, teti=0.2, me=0.01, c=25., n=1., ttot=0.5,
                 domsize=(10., 10.), ncells=(1000, 1000)):
        super(Thermal, self).__init__(domsize, ncells)
        # Physical parameters (normalized units).
        self.memi = me      # electron/ion mass ratio
        self.teti = teti    # Te/Ti ratio
        self.ttot = ttot    # total temperature
        self.n = n
        self.c = c
        self.domsize = domsize
        self.ncells = ncells

    def density(self):
        """Flat unit density."""
        return np.ones(self.ncells[1])

    def Te(self):
        """Uniform electron temperature set by ttot and the Te/Ti ratio."""
        return self.teti * self.ttot / (self.teti + 1.) * np.ones(self.ncells[1])

    def Ti(self):
        """Ion temperature profile derived from Te."""
        return self.Te() * self.ttot / self.teti

    def B(self):
        """Flat unit magnetic field."""
        return np.ones(self.ncells[1])
class DoubleHarris(CondInit):
    """Double Harris current sheet: tanh field reversals at 0.25 and 0.75
    of the box along x1, with sech^2 density peaks over a background nb."""

    def __init__(self, teti=0.2, nb=0.2, me=0.01, c=25., l=1., ttot=0.5,
                 domsize=(10., 10.), ncells=(1000, 1000)):
        super(DoubleHarris, self).__init__(domsize, ncells)
        self.memi = me      # electron/ion mass ratio
        self.teti = teti    # Te/Ti ratio
        self.ttot = ttot    # total temperature
        self.nb = nb        # background density
        self.l = l          # sheet half-thickness
        self.c = c
        self.domsize = domsize
        self.ncells = ncells

    def _xi(self, frac):
        """x1 relative to the sheet located at frac*max(x1), in units of l."""
        return (self.x1 - self.x1.max() * frac) / self.l

    def density(self):
        """Two sech^2 Harris peaks plus the uniform background."""
        return 1. / np.cosh(self._xi(0.25)) ** 2 \
               + 1. / np.cosh(self._xi(0.75)) ** 2 \
               + self.nb

    def Te(self):
        """Uniform electron temperature."""
        return self.teti * self.ttot / (1. + self.teti) + np.zeros(self.x1.size)

    def Ti(self):
        """Uniform ion temperature."""
        return self.ttot / (1. + self.teti) + np.zeros(self.x1.size)

    def B(self):
        """Antisymmetric double tanh field reversal."""
        return np.tanh(self._xi(0.25)) - np.tanh(self._xi(0.75))
class Couche1(CondInit):
    """Single current sheet ("couche"): a tanh B reversal at the middle of
    the box, uniform density and Te, with Ti set by pressure balance."""

    def __init__(self, te=0.2, beta=1., me=0.01, c=25., l=1.,
                 domsize=(10., 10.), ncells=(1000, 1000)):
        super(Couche1, self).__init__(domsize, ncells)
        self.memi = me      # electron/ion mass ratio
        self.te = te        # uniform electron temperature
        self.beta = beta    # asymptotic plasma beta
        self.l = l          # sheet half-thickness
        self.c = c
        self.domsize = domsize
        self.ncells = ncells

    def density(self):
        """Flat unit density."""
        return np.ones(self.ncells[1])

    def Te(self):
        """Uniform electron temperature te."""
        return self.te * np.ones(self.ncells[1])

    def Ti(self):
        """Ion temperature from total pressure balance with the field."""
        b_sq = self.B() ** 2
        # Constant total pressure; the 1.**2 factor is the asymptotic B0 = 1.
        p_total = (self.beta + 1.) * 1. ** 2 / 2.
        return (p_total - b_sq / 2.) / self.density() - self.Te()

    def B(self):
        """tanh field reversal centred at the middle of the x1 axis."""
        midpoint = 0.5 * self.x1.max()
        return np.tanh((self.x1 - midpoint) / self.l)
| [
"[email protected]"
] | |
f866bc12b8b1b500c309053924dcf816c46f9f05 | 5ed5b3f7db4cdc904f8a16085941cb28b3fef6c8 | /chat.py | d37ebe6ed9f825bf69ac5906f43d02fd76e0f1be | [] | no_license | kylejava/PythonChatroom | bf7b30fcb46c6c178cd3c8703a3c76d8f2c2325c | 502ea55604c6861deb8f4305cbda75d9e0f500b5 | refs/heads/main | 2023-04-09T01:14:37.880496 | 2021-04-25T03:24:31 | 2021-04-25T03:24:31 | 360,782,415 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | from socket import *
serverName = 'localhost'  # NOTE(review): unused — the bind below uses '' (all interfaces)
serverPort = 20000
# UDP socket: receive datagrams forever and print them to stdout.
server = socket(AF_INET, SOCK_DGRAM)
server.bind(('' , serverPort))
while True:
    # Block until a datagram (up to 2048 bytes) arrives.
    message, clientAddress = server.recvfrom(2048)
    print(message.decode())
| [
"[email protected]"
] | |
79e962b40ea02701d83a67b9e3a523ea362ce52e | f811f10c2d7808f8a5b7192e4b519f679f8db30c | /FreshData/com/mario/v160Sp1/ValidateColumn.py | a382be3cfa08b39d545a1f0021b7c6a74070d091 | [] | no_license | greenetdw/python-item | 0b1be9ced4b61dc31f81c6df9df8e506d52d30d6 | ba77cc6d555be73cb51d547f010c786820c87e00 | refs/heads/master | 2020-04-07T16:20:31.052209 | 2018-11-21T09:47:02 | 2018-11-21T09:47:02 | 158,525,133 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,241 | py | # -*- coding: UTF-8 -*-
from com.mario.utils.MysqlUtil import MysqlUtil
from com.mario.utils.common import getcitys
#检验表的字段名是否存在
def validateColumnExists(tableName, colunmName):
    """Return True when column *colunmName* exists on table *tableName*
    in the 'assacs' schema, according to information_schema."""
    # NOTE(review): names are interpolated straight into the SQL text; that
    # is acceptable for the internal, hard-coded names used by this module,
    # but do not call this with untrusted input.
    sql = "SELECT count(*) FROM information_schema.columns WHERE table_schema='assacs' AND table_name = '%s' AND column_name = '%s';" % (tableName, colunmName)
    db = MysqlUtil()
    row = db.fetchone(sql)
    return row[0] > 0
def getAllEncTablesColumns():
    """Map each table-name prefix to the encrypted coordinate columns it
    is expected to carry (checked by validateTableEncColumn)."""
    point_enc = ['dot_lat_enc', 'dot_lng_enc', 'photo_lat_enc', 'photo_lng_enc']
    mixed_enc = ['photo_lng_lat_enc', 'dot_lat_enc', 'dot_lng_enc']
    photo_enc = ['photo_lng_enc', 'photo_lat_enc']
    return {
        'common_task_data_': list(point_enc),
        'common_task_data_history_': list(point_enc),
        'inner_road_audit_marker_': ['dot_lat_enc', 'dot_lng_enc'],
        'fruit_aoi_': ['photo_lng_lat_enc'],
        'fruit_build_name_': list(mixed_enc),
        'fruit_build_unit_': list(mixed_enc),
        'fruit_inroad_point_': list(photo_enc),
        'fruit_inroad_track_': list(photo_enc),
        'fruit_aoi_error_': list(mixed_enc),
        'fruit_inroad_error_': list(mixed_enc),
    }
def getColumns():
    """Return the mapping of table-name prefixes to audit-related columns.

    FIX: the original built the dict but never returned it, so callers
    always received None.
    """
    t_dict = {}
    t_dict['common_task_workflow_'] = ['buildunit_audit_status']
    t_dict['common_task_data_buildunit_'] = ['first_audit_user', 'first_audit_get_time', 'first_audit_completed_time']
    return t_dict
def validateTableEncColumn():
    """Check every per-city table for its expected encrypted columns.

    Returns False (and prints the offender) at the first missing column,
    True when all city tables are complete.  Python 2 (print statements).
    """
    for city in getcitys():
        print 'process ' + city
        t_dict = getAllEncTablesColumns()
        for key in t_dict.keys():
            # Physical table name = prefix + city suffix.
            tableName = key + city
            columns = t_dict[key]
            # print 'process ' + tableName
            for columnName in columns:
                columnExists = validateColumnExists(tableName, columnName)
                if columnExists == False:
                    print tableName + ", " + columnName + ' is not Exists'
                    return False
    return True
if __name__ == '__main__':
    isAllEnc = validateTableEncColumn()
    print isAllEnc
| [
"[email protected]"
] | |
8e72e5d4714aeb1364c05856f35fbcf2ab342394 | 02eace51e56520b743e7cc33826bb1c28b96de0e | /src/utils/model.py | 034d11a74bd934bc96e4a2bb3db4ad9c770a3b1d | [] | no_license | nomanalahi/abstractive_and_extractive_text_summary | 172b8e96654b333bacab76ae802b2ee67927e616 | d7f580daa6866951e5e57a063cefed8672e00a9f | refs/heads/master | 2023-05-12T08:00:41.591020 | 2021-06-05T16:24:16 | 2021-06-05T16:24:16 | 374,160,141 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 71 | py | from pydantic import BaseModel
class Text(BaseModel):
    """Request body schema carrying a single text payload."""
    # Raw article text (presumably the text to be summarized — confirm
    # against the API endpoint that consumes this model).
    article: str
"[email protected]"
] | |
4723c6f7c093e3989d133cebab10e0c13bf512a0 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03997/s926418877.py | acd277945016fcae9d48adcc8806653b1aeeec5f | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 58 | py | a,b,c,d=eval('['+'int(input()),'*3+'0]');print((a+b)*c//2) | [
"[email protected]"
] | |
cdaec89a7ecfa4ae8042bf31ac073b89b8a58072 | a3387fbcc918acb55d289ffb61b9fb603203dc11 | /Puzzles/2022-01/01_22_balanced_days.py | 924f5189761f280c72866b5565b743883fbda28e | [] | no_license | fbhs-cs/purdys-puzzles | 13e970ff909ff2e093b3b9d9777faac47c099913 | 1cf3f9c52677843fad781e46304e1485a91aae58 | refs/heads/master | 2023-08-17T06:28:06.659751 | 2023-08-09T14:45:43 | 2023-08-09T14:45:43 | 212,085,565 | 4 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,069 | py | from math import ceil
def is_balanced(num):
n = str(num)
first = n[:ceil(len(n)/2)]
last = n[len(n)//2:]
#print(first,last)
if sum([int(x) for x in first]) == sum([int(x) for x in last]):
return True
else:
return False
def count_balanced(n):
count = 0
for i in range(1,n):
if is_balanced(i):
count += 1
return count
def sum_balanced(n):
total = 0
for i in range(1,n):
if is_balanced(i):
#print(i)
total += i
return total
def find_balanced_dates():
months = {1:31,2:28,3:31,4:30,5:31,6:30,
7:31,8:31,9:30,10:31,11:30,12:31}
count = 0
sum = 0
for month in range(1,13):
for day in range(1,months[month]+1):
day_num = str(month) + str(day) + '2022'
if is_balanced(int(day_num)):
count += 1
sum += int(day_num)
print(day_num)
print(count)
print(sum)
find_balanced_dates()
| [
"[email protected]"
] | |
9567422e1472a65046cf8160b1bdae8fbcf7dcd3 | 080c13cd91a073457bd9eddc2a3d13fc2e0e56ae | /MY_REPOS/awesome-4-new-developers/tensorflow-master/tensorflow/python/types/internal.py | c56c7aa6d7790b4c36d248603f2282e60af08a39 | [
"Apache-2.0"
] | permissive | Portfolio-Projects42/UsefulResourceRepo2.0 | 1dccc8961a09347f124d3ed7c27c6d73b9806189 | 75b1e23c757845b5f1894ebe53551a1cf759c6a3 | refs/heads/master | 2023-08-04T12:23:48.862451 | 2021-09-15T12:51:35 | 2021-09-15T12:51:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,129 | py | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Types internal to TensorFlow.
These types should not be exported. External code should not rely on these.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# TODO(mdan): Is this strictly needed? Only ops.py really uses it.
class NativeObject(object):
  """Types natively supported by various TF operations.

  The most notable example of NativeObject is Tensor.  The class body is
  empty: subclassing it serves purely as a marker.
  """
| [
"[email protected]"
] | |
28e7dee0700c6fe42c004b939fcaa2b9ff69d27e | eb64b799ff1d7ef3a244bf8e6f9f4e9118d5cfcd | /homeassistant/components/trafikverket_weatherstation/const.py | 7bb53dc5356a0b8a392104982912658806275659 | [
"Apache-2.0"
] | permissive | JeffLIrion/home-assistant | 53966b81b5d5816679f12fc761f79e8777c738d6 | 8f4ec89be6c2505d8a59eee44de335abe308ac9f | refs/heads/dev | 2023-08-22T09:42:02.399277 | 2022-02-16T01:26:13 | 2022-02-16T01:26:13 | 136,679,169 | 5 | 2 | Apache-2.0 | 2023-09-13T06:59:25 | 2018-06-09T00:58:35 | Python | UTF-8 | Python | false | false | 466 | py | """Adds constants for Trafikverket Weather integration."""
from homeassistant.const import Platform
DOMAIN = "trafikverket_weatherstation"      # integration domain key
CONF_STATION = "station"                    # config-entry key for the station name
PLATFORMS = [Platform.SENSOR]               # platforms this integration sets up
ATTRIBUTION = "Data provided by Trafikverket"
ATTR_MEASURE_TIME = "measure_time"          # extra state-attribute keys
ATTR_ACTIVE = "active"
# Sensor types whose missing (None) upstream value should be treated as
# zero, per the set's name — confirm against the sensor platform's usage.
NONE_IS_ZERO_SENSORS = {
    "air_temp",
    "road_temp",
    "wind_direction",
    "wind_speed",
    "wind_speed_max",
    "humidity",
    "precipitation_amount",
}
| [
"[email protected]"
] | |
fa3538d1032825a3792e7d729ca58f30e8f9c1f9 | 509b32366f32ce874f059df63809bb7d423dc611 | /main.py | ffcdd30f1294d626c47aeacd96169aaee94f20c2 | [
"MIT"
] | permissive | k4cg/Rezeptionistin | 3d775eda66bf53ffb4749da49a75906ce1e65aef | 0b0142d225a02b05b34074898c6e1d6a00005d9f | refs/heads/master | 2021-01-17T02:05:21.096015 | 2019-04-20T21:54:34 | 2019-04-20T21:54:34 | 35,680,115 | 5 | 12 | MIT | 2018-12-24T08:08:08 | 2015-05-15T14:58:51 | Python | UTF-8 | Python | false | false | 318 | py | #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import sys
import logging
from rezeptionistin import Rezeptionistin
reload(sys)
sys.setdefaultencoding("utf-8")
# start logging
logging.basicConfig(level=logging.DEBUG)
if sys.hexversion > 0x03000000:
raw_input = input
# run bot
bot = Rezeptionistin()
bot.run()
| [
"[email protected]"
] | |
39eff650d6cf8bd49b8a7a3e3ca0386f60b71f8a | 6de05800f20f2b883f3dfb2acf359ea10c860f4c | /8_stack & queue/hard/772. Basic Calculator III.py | 60bbdc48b1c97a820c8df37c61534aedfb2730b3 | [] | no_license | Jerrydepon/LeetCode | 8d505608efeeb98ca81f9a72af278853e52c408a | 93c4712779d3631adca1f40004f2ed9ee4fad766 | refs/heads/master | 2020-05-25T19:24:28.415969 | 2019-10-26T23:39:49 | 2019-10-26T23:39:49 | 187,950,378 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,980 | py | # using two stacks, operation & nums
# mind how to check precedence
# mind '-' after '('
class Solution:
    """LeetCode 772 "Basic Calculator III": evaluate an expression string
    containing non-negative integers, '+', '-', '*', '/', parentheses and
    spaces.  Division truncates toward zero, as the problem requires.
    """

    def calculate(self, s: str) -> int:
        """Evaluate *s* and return the integer result (0 for an empty string).

        Two-stack algorithm: ``nums`` holds operands, ``ops`` holds
        operators; a stacked operator is applied as soon as an incoming
        operator does not outrank it.
        """
        def apply(op: str, second: int, first: int) -> int:
            # Apply a binary operator; note the operands arrive popped in
            # reverse order (second, then first).
            if op == '+':
                return first + second
            if op == '-':
                return first - second
            if op == '*':
                return first * second
            # FIX: the original used floor division (//), but the problem
            # requires truncation toward zero (e.g. -7/2 -> -3, not -4).
            # Divide magnitudes and reapply the sign to avoid float issues.
            quotient = abs(first) // abs(second)
            return -quotient if (first < 0) != (second < 0) else quotient

        def yields_first(current_op: str, prev_op: str) -> bool:
            # True when the operator on the stack must be applied before
            # current_op is pushed (i.e. current_op does not outrank it).
            if prev_op == '(' or prev_op == ')':
                return False
            if current_op in ('*', '/') and prev_op in ('+', '-'):
                return False
            return True

        if not s:
            return 0
        nums, ops, i = [], [], 0
        while i < len(s):
            c = s[i]
            if c == " ":
                i += 1
                continue
            elif c.isdigit():
                # Accumulate a multi-digit number.
                num = int(c)
                while i < len(s) - 1 and s[i + 1].isdigit():
                    num = num * 10 + int(s[i + 1])
                    i += 1
                nums.append(num)
            elif c == "(":
                ops.append(c)
                # A '-' right after '(' is unary: give it a 0 left operand.
                if s[i + 1] == '-':
                    nums.append(0)
            elif c == ")":
                # Reduce everything back to the matching '('.
                while ops[-1] != '(':
                    nums.append(apply(ops.pop(), nums.pop(), nums.pop()))
                ops.pop()
            elif c in ['+', '-', '*', '/']:
                while len(ops) != 0 and yields_first(c, ops[-1]):
                    if len(nums) > 1:
                        nums.append(apply(ops.pop(), nums.pop(), nums.pop()))
                    else:
                        # Leading unary operator: treat the missing left
                        # operand as 0.
                        nums.append(apply(ops.pop(), nums.pop(), 0))
                ops.append(c)
            i += 1
        # Apply whatever operators remain.
        while len(ops) > 0:
            nums.append(apply(ops.pop(), nums.pop(), nums.pop()))
        return nums[0]
"[email protected]"
] | |
b4e4fbd8a910e521ad48ba68625cf1104803eb67 | 231c2facba148ca42aa8bbdc2b9f2f8555b98be3 | /tasks/urls.py | 16a6793d9ea2abc70367eb999711ff1ccc4238fb | [] | no_license | SVDileepKumar/taskapplication | 0d3e24904ca09254a0e2fbf22da999b46812177c | 3070182f8c8c6f0dd25c1234d67b1ea2246eafac | refs/heads/master | 2021-01-20T22:34:15.766198 | 2016-08-14T22:45:19 | 2016-08-14T22:45:19 | 65,689,559 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | from django.conf.urls import url
from . import views
from django.contrib.auth.views import login, logout
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^search/$', views.search, name='search'),
url(r'^task/addsubtasks$', views.addsubtask, name='addsubtask'),
url(r'^task/(?P<id>.+)$', views.taskview, name='taskview'),
url(r'^tasklist/(?P<id>.+)$', views.tasklistview, name='tasklistview'),
]
| [
"[email protected]"
] | |
becaebfd57de87517f83fb188ffe1860ee44300a | f08c79663074bfd104135e1347f3228b29620d24 | /csrt.py | 6da5c8ba236a0d1428f0aadc2f3e058f81921930 | [] | no_license | battcheeks/Computer-Vision | 140e3d0a3b20cba637b275dc6d7ebc5f413a2e31 | ffa8f277312fc4553e25db09a6f53a107d7f4d41 | refs/heads/master | 2022-11-10T19:33:31.721963 | 2020-06-27T09:54:15 | 2020-06-27T09:54:15 | 275,339,008 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,146 | py | from imutils.video import VideoStream
from imutils.video import FPS
import argparse
import imutils
import time
import cv2
# Interactive OpenCV object-tracking demo: press 's' to select a ROI to
# track, 'q' to quit.  The tracked box centre is printed each frame.
# NOTE(review): `global` at module level is a no-op; a and b are only ever
# assigned inside the loop.
global a,b
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--video", type=str,
    help="path to input video file")
ap.add_argument("-t", "--tracker", type=str, default="kcf",
    help="OpenCV object tracker type")
args = vars(ap.parse_args())
# OpenCV < 3.3 exposed a single factory; newer versions have per-algorithm
# constructors selected by name.
(major, minor) = cv2.__version__.split(".")[:2]
if int(major) == 3 and int(minor) < 3:
    tracker = cv2.Tracker_create(args["tracker"].upper())
else:
    OPENCV_OBJECT_TRACKERS = {
        "csrt": cv2.TrackerCSRT_create,
        "kcf": cv2.TrackerKCF_create,
        "boosting": cv2.TrackerBoosting_create,
        "mil": cv2.TrackerMIL_create,
        "tld": cv2.TrackerTLD_create,
        "medianflow": cv2.TrackerMedianFlow_create,
        "mosse": cv2.TrackerMOSSE_create
    }
    tracker = OPENCV_OBJECT_TRACKERS[args["tracker"]]()
# Bounding box of the object being tracked; None until 's' is pressed.
initBB = None
# No --video argument: fall back to the webcam.
if not args.get("video", False):
    print("[INFO] starting video stream...")
    vs = VideoStream(src=0).start()
    time.sleep(1.0)
else:
    vs = cv2.VideoCapture(args["video"])
fps = None
# loop over frames from the video stream
while True:
    # grab the current frame, then handle if we are using a
    # VideoStream or VideoCapture object
    frame = vs.read()
    frame = frame[1] if args.get("video", False) else frame
    # check to see if we have reached the end of the stream
    if frame is None:
        break
    frame = imutils.resize(frame, width=500)
    (H, W) = frame.shape[:2]
    # check to see if we are currently tracking an object
    if initBB is not None:
        (success, box) = tracker.update(frame)
        if success:
            (x, y, w, h) = [int(v) for v in box]
            cv2.rectangle(frame, (x, y), (x + w, y + h),
                (0, 255, 0), 2)
            # Print and remember the box centre ("cx,cy").
            print(str(x+w/2)+","+str(y+h/2))
            a=str(x+w/2)
            b=str(y+h/2)
        # update the FPS counter
        fps.update()
        fps.stop()
    cv2.imshow("Frame", frame)
    key = cv2.waitKey(1) & 0xFF
    # 's' selects a new region of interest and (re)starts tracking.
    if key == ord("s"):
        initBB = cv2.selectROI("Frame", frame, fromCenter=False,
            showCrosshair=True)
        tracker.init(frame, initBB)
        fps = FPS().start()
    elif key == ord("q"):
        break
# Release whichever capture object was used.
if not args.get("video", False):
    vs.stop()
else:
    vs.release()
cv2.destroyAllWindows()
"[email protected]"
] | |
class Node:
    """A single binary-search-tree node."""

    def __init__(self, value):
        self.left = None    # left child (values < self.value)
        self.right = None   # right child (values >= self.value)
        self.value = value


class BST:
    """Binary search tree operating on Node objects.

    NOTE(review): insert() takes the subtree root explicitly and never
    assigns self.root, so search() (which reads self.root) returns False
    unless the caller sets bst.root manually; that behavior is kept for
    compatibility with existing callers.
    """

    def __init__(self):
        self.root = None

    def insert(self, node, value):
        """Insert *value* under *node* and return the (unchanged) subtree root."""
        # If the tree is empty, return a new node
        if node is None:
            return Node(value)
        # Otherwise recur down the tree
        if value < node.value:
            node.left = self.insert(node.left, value)
        else:
            node.right = self.insert(node.right, value)
        # return the (unchanged) node pointer
        return node

    def inorder(self, root):
        """Print the values in ascending (left, root, right) order."""
        if root:
            self.inorder(root.left)
            print(root.value)
            self.inorder(root.right)

    def preorder(self, root):
        """Print the values in root, left, right order."""
        if root:
            print(root.value)
            self.preorder(root.left)
            self.preorder(root.right)

    def postorder(self, root):
        """Print the values in left, right, root order.

        FIX: the original recursed into preorder() for the right subtree,
        so its output was neither a pre- nor a post-order traversal.
        """
        if root:
            self.postorder(root.left)
            self.postorder(root.right)
            print(root.value)

    def minval_node(self, node):
        """Return the node holding the smallest value in *node*'s subtree."""
        current = node
        while current.left is not None:
            current = current.left
        return current

    def deleteNode(self, root, value):
        """Delete *value* from the subtree rooted at *root*; return the new root."""
        if root is None:
            return root
        if value < root.value:
            root.left = self.deleteNode(root.left, value)
        elif value > root.value:
            root.right = self.deleteNode(root.right, value)
        else:
            if root.left is None:
                temp = root.right
                root = None
                return temp
            elif root.right is None:
                # FIX: the original returned root.right here (always None),
                # which silently dropped the whole left subtree.
                temp = root.left
                root = None
                return temp
            # Two children: replace the value with the in-order successor
            # and delete that successor from the right subtree.
            temp = self.minval_node(root.right)
            root.value = temp.value
            root.right = self.deleteNode(root.right, temp.value)
        print(value, " deleted")
        return root

    def search(self, value):
        """Search from self.root (see the class NOTE about self.root)."""
        if self.root is not None:
            return self._search(value, self.root)
        return False

    def _search(self, value, node):
        """Recursive lookup of *value* in the subtree rooted at *node*.

        FIX: the original discarded the recursive call results, so any
        match below the immediate node was reported as False.
        """
        if value == node.value:
            return True
        if value < node.value and node.left is not None:
            return self._search(value, node.left)
        if value > node.value and node.right is not None:
            return self._search(value, node.right)
        return False
# Demo: build a small BST, show its traversals, search, delete, re-print.
print("*"*25, "Delete Node BST", "*"*25)
tree = BST()
root = Node(50)
for item in (40, 30, 4, 78):
    tree.insert(root, item)
print("\nInorder :")
tree.inorder(root)
print("\nPostorder :")
tree.postorder(root)
print("\nPreorder :")
tree.preorder(root)
print("\n\tSearch Result :", tree.search(50))
print("\n")
tree.deleteNode(root, 30)
print("\n")
tree.preorder(root)
| [
"[email protected]"
] | |
6af1cf3dba6a860a982c23e4ab2694e620cdd26b | ea0d5f7bdd8d9874f3c14f46b01046aabda72192 | /old-tests/getnet/services/payments/credit/test_integration.py | 4b700574b8b941393b6479a83ceb23dbb30ebf93 | [
"MIT"
] | permissive | ramon/getnet-py | e948de02d0b0909905b84f7b43fa28343d124ff5 | 21daec202fe48d6ec051bf37e0c4760af8bbf25a | refs/heads/master | 2023-02-20T15:30:44.441202 | 2020-09-11T18:47:33 | 2021-02-22T16:21:02 | 201,107,637 | 1 | 4 | MIT | 2023-02-08T00:28:21 | 2019-08-07T18:38:02 | Python | UTF-8 | Python | false | false | 4,570 | py | # import os
# import unittest
#
# from vcr_unittest import VCRTestCase
#
# import getnet
# from getnet.errors.business_error import BusinessError
# from getnet.services.payments import Order, Customer
# from getnet.services.payments.credit import Service, Card, Credit
# from getnet.services.payments.credit.credit_cancel import CreditCancelResponse
# from getnet.services.payments.payment_response import PaymentResponse
# from tests.getnet.services.customers.test_customer import sample as customer_sample
# from tests.getnet.services.payments.credit.test_card import sample as card_sample
#
#
# class PaymentCreditIntegrationTest(VCRTestCase):
# def setUp(self) -> None:
# super(PaymentCreditIntegrationTest, self).setUp()
# self.client = getnet.Client(
# os.environ.get("GETNET_SELLER_ID"),
# os.environ.get("GETNET_CLIENT_ID"),
# os.environ.get("GETNET_CLIENT_SECRET"),
# getnet.client.HOMOLOG,
# )
# self.service = Service(self.client)
# self.order = Order("6d2e4380-d8a3-4ccb-9138-c289182818a3", 0, "physical_goods")
# self.customer = Customer(**customer_sample.copy())
#
# def testCreate(self):
# card_token = self.client.generate_token_card(
# "4012001037141112", "customer_21081826"
# )
# card = Card(**card_sample.copy())
# card.number_token = card_token
#
# response = self.service.create(
# amount=100,
# currency="BRL",
# order=self.order,
# customer=self.customer,
# credit=Credit(card=card),
# )
#
# self.assertIsInstance(response, PaymentResponse)
# self.assertIsNotNone(response.payment_id)
# self.assertEqual(response.status, "APPROVED")
# self.assertIsNotNone(response.credit.transaction_id)
#
# def testCreateWithInstall(self):
# card_token = self.client.generate_token_card(
# "4012001037141112", "customer_21081826"
# )
# card = Card(**card_sample.copy())
# card.number_token = card_token
#
# response = self.service.create(
# amount=40606,
# currency="BRL",
# order=self.order,
# customer=self.customer,
# credit=Credit(
# card=card, transaction_type="INSTALL_NO_INTEREST", number_installments=6
# ),
# )
#
# self.assertIsInstance(response, PaymentResponse)
# self.assertIsNotNone(response.payment_id)
# self.assertEqual(response.status, "APPROVED")
# self.assertIsNotNone(response.credit.transaction_id)
#
# def testCreateWithInvalidInstall(self):
# card_token = self.client.generate_token_card(
# "4012001037141112", "customer_21081826"
# )
# card = Card(**card_sample.copy())
# card.number_token = card_token
#
# with self.assertRaises(BusinessError) as err:
# self.service.create(
# amount=40606,
# currency="BRL",
# order=self.order,
# customer=self.customer,
# credit=Credit(
# card=card,
# transaction_type="INSTALL_WITH_INTEREST",
# number_installments=5,
# ),
# )
#
# self.assertEqual("PAYMENTS-011", err.exception.error_code)
# self.assertEqual("NOT APPROVED", err.exception.status)
#
# def testPaymentCancel(self):
# card_token = self.client.generate_token_card(
# "4012001037141112", "customer_21081826"
# )
# card = Card(**card_sample.copy())
# card.number_token = card_token
#
# response = self.service.create(
# amount=100,
# currency="BRL",
# order=self.order,
# customer=self.customer,
# credit=Credit(card=card),
# )
#
# self.assertIsInstance(response, PaymentResponse)
# self.assertIsNotNone(response.payment_id)
# self.assertEqual(response.status, "APPROVED")
#
# cancel_response = self.service.cancel(response.payment_id)
# self.assertEqual(cancel_response.status, "CANCELED")
# self.assertIsInstance(cancel_response.credit_cancel, CreditCancelResponse)
# self.assertEqual(
# cancel_response.credit_cancel.message,
# "Credit transaction cancelled successfully",
# )
#
#
# if __name__ == "__main__":
# unittest.main()
| [
"[email protected]"
] | |
808ac7632e66327e3f8d1fe634dab41d619f065e | 786de89be635eb21295070a6a3452f3a7fe6712c | /CorAna/tags/V00-00-04/src/ConfigParametersCorAna.py | 8baf5f326ca6758d621cc3f9f8cf43ac75c28720 | [] | no_license | connectthefuture/psdmrepo | 85267cfe8d54564f99e17035efe931077c8f7a37 | f32870a987a7493e7bf0f0a5c1712a5a030ef199 | refs/heads/master | 2021-01-13T03:26:35.494026 | 2015-09-03T22:22:11 | 2015-09-03T22:22:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,606 | py | #--------------------------------------------------------------------------
# File and Version Information:
# $Id$
#
# Description:
# Module ConfigParametersCorAna...
#
#------------------------------------------------------------------------
"""Is intended as a storage for configuration parameters for CorAna project.
This software was developed for the LCLS project. If you use all or
part of it, please give an appropriate acknowledgment.
@version $Id: template!python!py 4 2008-10-08 19:27:36Z salnikov $
@author Mikhail S. Dubrovin
"""
#------------------------------
# Module's version from CVS --
#------------------------------
__version__ = "$Revision: 4 $"
# $Source$
#--------------------------------
# Imports of standard modules --
#--------------------------------
import sys
import os
from copy import deepcopy
#-----------------------------
# Imports for other modules --
#-----------------------------
#import ConfigParameters as cpbase
from ConfigParameters import * # ConfigParameters
from Logger import logger
from PyQt4 import QtGui # for icons only...
import AppDataPath as apputils # for icons
#---------------------
# Class definition --
#---------------------
class ConfigParametersCorAna ( ConfigParameters ) :
    """Storage for configuration parameters of the CorAna project.

    Each parameter is registered with the base-class ``declareParameter`` /
    ``declareListOfPars`` machinery (name, default value, type) and can be
    round-tripped to a text file by the base class.  The class also hosts
    run-time-only state (icons, Qt style sheets) that is not persisted.

    #@see BaseClass ConfigParameters
    #@see OtherClass Parameters
    """

    # Registry of declared parameters, populated by the base class.
    list_pars = []

    def __init__ ( self, fname=None ) :
        """Constructor.

        @param fname the file name with configuration parameters; if not
               specified it is set to the default value at declaration.
        """
        # Order matters: parameters must be declared before the file is read,
        # and run-time state must exist before styles reference it.
        ConfigParameters.__init__(self)
        self.declareCorAnaParameters()
        self.readParametersFromFile ( fname )
        self.initRunTimeParameters()
        self.defineStyles()

    def initRunTimeParameters( self ) :
        """Initialize non-persistent state used by the GUI at run time."""
        self.char_expand = u' \u25BE' # down-head triangle
        self.iconsAreLoaded = False
        self.plotarray_is_on = False
        self.plotg2_is_on = False
        self.autoRunStatus = 0 # 0=inactive, 1=split, 2=process, 3=merge
        #self.plotimgspe = None
        self.plotimgspe_g = None

    #-----------------------------

    def setIcons(self) :
        """Lazily load all GUI icons from the CorAna data area (idempotent)."""
        if self.iconsAreLoaded : return
        self.iconsAreLoaded = True

        path_icon_contents = apputils.AppDataPath('CorAna/icons/contents.png').path()
        path_icon_mail_forward = apputils.AppDataPath('CorAna/icons/mail-forward.png').path()
        path_icon_button_ok = apputils.AppDataPath('CorAna/icons/button_ok.png').path()
        path_icon_button_cancel = apputils.AppDataPath('CorAna/icons/button_cancel.png').path()
        path_icon_exit = apputils.AppDataPath('CorAna/icons/exit.png').path()
        path_icon_home = apputils.AppDataPath('CorAna/icons/home.png').path()
        path_icon_redo = apputils.AppDataPath('CorAna/icons/redo.png').path()
        path_icon_undo = apputils.AppDataPath('CorAna/icons/undo.png').path()
        path_icon_reload = apputils.AppDataPath('CorAna/icons/reload.png').path()
        path_icon_save = apputils.AppDataPath('CorAna/icons/save.png').path()
        path_icon_save_cfg = apputils.AppDataPath('CorAna/icons/fileexport.png').path()
        path_icon_edit = apputils.AppDataPath('CorAna/icons/edit.png').path()
        path_icon_browser = apputils.AppDataPath('CorAna/icons/fileopen.png').path()
        path_icon_monitor = apputils.AppDataPath('CorAna/icons/icon-monitor.png').path()
        path_icon_unknown = apputils.AppDataPath('CorAna/icons/icon-unknown.png').path()
        path_icon_logviewer = apputils.AppDataPath('CorAna/icons/logviewer.png').path()
        path_icon_locked = apputils.AppDataPath('CorAna/icons/locked-icon.png').path()
        path_icon_unlocked = apputils.AppDataPath('CorAna/icons/unlocked-icon.png').path()

        self.icon_contents = QtGui.QIcon(path_icon_contents)
        self.icon_mail_forward = QtGui.QIcon(path_icon_mail_forward)
        self.icon_button_ok = QtGui.QIcon(path_icon_button_ok)
        self.icon_button_cancel = QtGui.QIcon(path_icon_button_cancel)
        self.icon_exit = QtGui.QIcon(path_icon_exit)
        self.icon_home = QtGui.QIcon(path_icon_home)
        self.icon_redo = QtGui.QIcon(path_icon_redo)
        self.icon_undo = QtGui.QIcon(path_icon_undo)
        self.icon_reload = QtGui.QIcon(path_icon_reload)
        self.icon_save = QtGui.QIcon(path_icon_save)
        self.icon_save_cfg = QtGui.QIcon(path_icon_save_cfg)
        self.icon_edit = QtGui.QIcon(path_icon_edit)
        self.icon_browser = QtGui.QIcon(path_icon_browser)
        self.icon_monitor = QtGui.QIcon(path_icon_monitor)
        self.icon_unknown = QtGui.QIcon(path_icon_unknown)
        self.icon_logviewer = QtGui.QIcon(path_icon_logviewer)
        self.icon_lock = QtGui.QIcon(path_icon_locked)
        self.icon_unlock = QtGui.QIcon(path_icon_unlocked)

        # Earlier variant loading icons from the system theme, kept for reference:
        #base_dir = '/usr/share/icons/Bluecurve/24x24/'
        #self.icon_contents = QtGui.QIcon(base_dir + 'actions/contents.png')
        #self.icon_mail_forward = QtGui.QIcon(base_dir + '../../gnome/24x24/actions/mail-forward.png')
        #self.icon_button_ok = QtGui.QIcon(base_dir + 'actions/button_ok.png')
        #self.icon_button_cancel = QtGui.QIcon(base_dir + 'actions/button_cancel.png')
        #self.icon_exit = QtGui.QIcon(base_dir + 'actions/exit.png')
        #self.icon_home = QtGui.QIcon(base_dir + 'actions/gohome.png')
        #self.icon_redo = QtGui.QIcon(base_dir + 'actions/redo.png')
        #self.icon_undo = QtGui.QIcon(base_dir + 'actions/undo.png')
        #self.icon_reload = QtGui.QIcon(base_dir + 'actions/reload.png')
        #self.icon_stop = QtGui.QIcon(base_dir + 'actions/stop.png')
        #self.icon_save_cfg = QtGui.QIcon(base_dir + 'actions/fileexport.png')
        #self.icon_save = QtGui.QIcon(base_dir + 'stock/stock-save.png')
        #self.icon_edit = QtGui.QIcon(base_dir + 'actions/edit.png')
        #self.icon_browser = QtGui.QIcon(base_dir + 'actions/fileopen.png')
        #self.icon_monitor = QtGui.QIcon(base_dir + 'apps/icon-monitor.png')
        #self.icon_unknown = QtGui.QIcon(base_dir + 'apps/icon-unknown.png')
        #self.icon_logviewer = QtGui.QIcon(base_dir + '../32x32/apps/logviewer.png')

        # Aliases reusing already-loaded icons.
        self.icon_logger = self.icon_edit
        self.icon_help = self.icon_unknown
        self.icon_reset = self.icon_reload

    #-----------------------------

    def declareCorAnaParameters( self ) :
        """Declare every persisted parameter (name, default, type).

        Parameters are grouped by the GUI module that edits them; the
        section comments name that module.
        """
        # Possible types for declaration : 'str', 'int', 'long', 'float', 'bool'

        # GUIInstrExpRun.py.py
        # self.fname_cp = self.declareParameter( name='FNAME_CONFIG_PARS', val_def='confpars.txt', type='str' )
        # self.fname_ped = self.declareParameter( name='FNAME_PEDESTALS', val_def='my_ped.txt', type='str' )
        # self.fname_dat = self.declareParameter( name='FNAME_DATA', val_def='my_dat.txt', type='str' )
        # self.instr_dir = self.declareParameter( name='INSTRUMENT_DIR', val_def='/reg/d/psdm', type='str' )
        # self.instr_name = self.declareParameter( name='INSTRUMENT_NAME', val_def='XCS', type='str' )
        # self.exp_name = self.declareParameter( name='EXPERIMENT_NAME', val_def='xcsi0112', type='str' )
        # self.str_run_number = self.declareParameter( name='RUN_NUMBER', val_def='0015', type='str' )
        # self.str_run_number_dark= self.declareParameter( name='RUN_NUMBER_DARK', val_def='0014', type='str' )

        # GUIMainTB.py
        # GUIMainSplit.py
        self.current_tab = self.declareParameter( name='CURRENT_TAB', val_def='Files', type='str' )

        # GUILogger.py
        self.log_level = self.declareParameter( name='LOG_LEVEL_OF_MSGS', val_def='info', type='str' )

        # GUIFiles.py
        self.current_file_tab = self.declareParameter( name='CURRENT_FILE_TAB', val_def='Work/Results', type='str' )

        # GUIRun.py
        self.current_run_tab = self.declareParameter( name='CURRENT_RUN_TAB', val_def='Input', type='str' )

        # GUIWorkResDirs.py
        self.dir_work = self.declareParameter( name='DIRECTORY_WORK', val_def='./work', type='str' )
        self.dir_results = self.declareParameter( name='DIRECTORY_RESULTS', val_def='./results', type='str' )
        self.fname_prefix = self.declareParameter( name='FILE_NAME_PREFIX', val_def='cora-', type='str' )
        self.fname_prefix_cora = self.declareParameter( name='FILE_NAME_PREFIX_CORA', val_def='cora-proc', type='str' )

        # GUIDark.py
        self.use_dark_xtc_all = self.declareParameter( name='USE_DARK_XTC_ALL_CHUNKS', val_def=True, type='bool' )
        self.in_dir_dark = self.declareParameter( name='IN_DIRECTORY_DARK', val_def='/reg/d/psdm/XCS/xcsi0112/xtc', type='str' )
        self.in_file_dark = self.declareParameter( name='IN_FILE_NAME_DARK', val_def='e167-r0020-s00-c00.xtc', type='str' )

        # GUIFlatField.py
        self.ccdcorr_flatfield = self.declareParameter( name='CCD_CORRECTION_FLATFIELD', val_def=False, type='bool' )
        self.dname_flat = self.declareParameter( name='DIRECTORY_FLAT', val_def='.', type='str' )
        self.fname_flat = self.declareParameter( name='FILE_NAME_FLAT', val_def='flat_field.txt', type='str' )
        #self.in_dir_flat = self.declareParameter( name='IN_DIRECTORY_FLAT', val_def='/reg/d/psdm/XCS/xcsi0112/xtc', type='str' )
        #self.in_file_flat = self.declareParameter( name='IN_FILE_NAME_FLAT', val_def='e167-r0020-s00-c00.xtc', type='str' )

        # GUIBlemish.py
        self.ccdcorr_blemish = self.declareParameter( name='CCD_CORRECTION_BLEMISH', val_def=False, type='bool' )
        self.dname_blem = self.declareParameter( name='DIRECTORY_BLEM', val_def='.', type='str' )
        self.fname_blem = self.declareParameter( name='FILE_NAME_BLEM', val_def='blemish.txt', type='str' )
        #self.in_dir_blem = self.declareParameter( name='IN_DIRECTORY_BLEM', val_def='/reg/d/psdm/XCS/xcsi0112/xtc', type='str' )
        #self.in_file_blem = self.declareParameter( name='IN_FILE_NAME_BLEM', val_def='e167-r0020-s00-c00.xtc', type='str' )

        # GUIData.py
        self.use_data_xtc_all = self.declareParameter( name='USE_DATA_XTC_ALL_CHUNKS', val_def=True, type='bool' )
        self.is_active_data_gui = self.declareParameter( name='IS_ACTIVE_DATA_GUI', val_def=True, type='bool' )
        self.in_dir_data = self.declareParameter( name='IN_DIRECTORY_DATA', val_def='/reg/d/psdm/XCS/xcsi0112/xtc', type='str' )
        self.in_file_data = self.declareParameter( name='IN_FILE_NAME_DATA', val_def='e167-r0020-s00-c00.xtc', type='str' )

        # GUISetupBeamZero.py
        self.x_coord_beam0 = self.declareParameter( name='X_COORDINATE_BEAM_ZERO', val_def=1234.5, type='float' )
        self.y_coord_beam0 = self.declareParameter( name='Y_COORDINATE_BEAM_ZERO', val_def=1216.5, type='float' )
        self.x0_pos_in_beam0 = self.declareParameter( name='X_CCD_POS_IN_BEAM_ZERO', val_def=-59, type='float' )
        self.y0_pos_in_beam0 = self.declareParameter( name='Y_CCD_POS_IN_BEAM_ZERO', val_def=175, type='float' )

        # GUISetupSpecular.py
        self.x_coord_specular = self.declareParameter( name='X_COORDINATE_SPECULAR', val_def=-1, type='float' )
        self.y_coord_specular = self.declareParameter( name='Y_COORDINATE_SPECULAR', val_def=-2, type='float' )
        self.x0_pos_in_specular = self.declareParameter( name='X_CCD_POS_IN_SPECULAR', val_def=-3, type='float' )
        self.y0_pos_in_specular = self.declareParameter( name='Y_CCD_POS_IN_SPECULAR', val_def=-4, type='float' )

        # GUISetupData.py
        self.x0_pos_in_data = self.declareParameter( name='X_CCD_POS_IN_DATA', val_def=-51, type='float' )
        self.y0_pos_in_data = self.declareParameter( name='Y_CCD_POS_IN_DATA', val_def=183, type='float' )

        # GUISetupInfoLeft.py
        self.sample_det_dist = self.declareParameter( name='SAMPLE_TO_DETECTOR_DISTANCE', val_def=4000.1, type='float' )
        # NOTE(review): 'Baem Zero' looks like a typo for 'Beam Zero'; the value
        # is persisted/compared elsewhere, so confirm all users before fixing.
        self.exp_setup_geom = self.declareParameter( name='EXP_SETUP_GEOMETRY', val_def='Baem Zero', type='str' )
        self.photon_energy = self.declareParameter( name='PHOTON_ENERGY', val_def=7.6543, type='float' )
        self.nominal_angle = self.declareParameter( name='NOMINAL_ANGLE', val_def=-1, type='float' )
        self.real_angle = self.declareParameter( name='REAL_ANGLE', val_def=-1, type='float' )

        # GUIImgSizePosition.py
        self.col_begin = self.declareParameter( name='IMG_COL_BEGIN', val_def=0, type='int' )
        self.col_end = self.declareParameter( name='IMG_COL_END', val_def=1339, type='int' )
        self.row_begin = self.declareParameter( name='IMG_ROW_BEGIN', val_def=1, type='int' )
        self.row_end = self.declareParameter( name='IMG_ROW_END', val_def=1299, type='int' )

        # GUIKineticMode.py
        self.kin_mode = self.declareParameter( name='KINETICS_MODE', val_def='Non-Kinetics', type='str' )
        self.kin_win_size = self.declareParameter( name='KINETICS_WIN_SIZE', val_def=1, type='int' )
        self.kin_top_row = self.declareParameter( name='KINETICS_TOP_ROW', val_def=2, type='int' )
        self.kin_slice_first = self.declareParameter( name='KINETICS_SLICE_FIRST', val_def=3, type='int' )
        self.kin_slice_last = self.declareParameter( name='KINETICS_SLICE_LAST', val_def=4, type='int' )

        # GUISetupPars.py
        self.bat_num = self.declareParameter( name='BATCH_NUM', val_def= 1, type='int' )
        self.bat_num_max = self.declareParameter( name='BATCH_NUM_MAX', val_def= 9, type='int' )
        #self.bat_data_is_used = self.declareParameter( name='BATCH_DATA_IS_USED', val_def=True, type='bool' )
        self.bat_data_start = self.declareParameter( name='BATCH_DATA_START', val_def= 1, type='int' )
        self.bat_data_end = self.declareParameter( name='BATCH_DATA_END', val_def=-1, type='int' )
        self.bat_data_total = self.declareParameter( name='BATCH_DATA_TOTAL', val_def=-1, type='int' )
        self.bat_data_time = self.declareParameter( name='BATCH_DATA_TIME', val_def=-1.0, type='float' )
        self.bat_data_dt_ave = self.declareParameter( name='BATCH_DATA_DT_AVE', val_def=-1.0, type='float' )
        self.bat_data_dt_rms = self.declareParameter( name='BATCH_DATA_DT_RMS', val_def=0.0, type='float' )
        self.bat_dark_is_used = self.declareParameter( name='BATCH_DARK_IS_USED', val_def=True, type='bool' )
        self.bat_dark_start = self.declareParameter( name='BATCH_DARK_START', val_def= 1, type='int' )
        self.bat_dark_end = self.declareParameter( name='BATCH_DARK_END', val_def=-1, type='int' )
        self.bat_dark_total = self.declareParameter( name='BATCH_DARK_TOTAL', val_def=-1, type='int' )
        self.bat_dark_time = self.declareParameter( name='BATCH_DARK_TIME', val_def=-1.0, type='float' )
        self.bat_dark_dt_ave = self.declareParameter( name='BATCH_DARK_DT_AVE', val_def=-1.0, type='float' )
        self.bat_dark_dt_rms = self.declareParameter( name='BATCH_DARK_DT_RMS', val_def=0.0, type='float' )
        #self.bat_flat_is_used = self.declareParameter( name='BATCH_FLAT_IS_USED', val_def=True, type='bool' )
        self.bat_flat_start = self.declareParameter( name='BATCH_FLAT_START', val_def= 1, type='int' )
        self.bat_flat_end = self.declareParameter( name='BATCH_FLAT_END', val_def=-1, type='int' )
        self.bat_flat_total = self.declareParameter( name='BATCH_FLAT_TOTAL', val_def=-1, type='int' )
        self.bat_flat_time = self.declareParameter( name='BATCH_FLAT_TIME', val_def=-1.0, type='float' )
        self.bat_queue = self.declareParameter( name='BATCH_QUEUE', val_def='psfehq', type='str' )
        self.bat_det_info = self.declareParameter( name='BATCH_DET_INFO', val_def='DetInfo(:Princeton)', type='str' )
        #self.bat_det_info = self.declareParameter( name='BATCH_DET_INFO', val_def='DetInfo(XcsBeamline.0:Princeton.0)', type='str' )
        self.bat_img_rec_mod = self.declareParameter( name='BATCH_IMG_REC_MODULE', val_def='ImgAlgos.PrincetonImageProducer', type='str' )

        # BatchLogParser.py
        self.bat_img_rows = self.declareParameter( name='BATCH_IMG_ROWS', val_def= -1, type='int' )
        self.bat_img_cols = self.declareParameter( name='BATCH_IMG_COLS', val_def= -1, type='int' )
        self.bat_img_size = self.declareParameter( name='BATCH_IMG_SIZE', val_def= -1, type='int' )
        self.bat_img_nparts = self.declareParameter( name='BATCH_IMG_NPARTS', val_def= 8, type='int' )

        # GUIAnaSettingsLeft.py
        self.ana_type = self.declareParameter( name='ANA_TYPE', val_def='Static', type='str' )
        self.ana_stat_meth_q = self.declareParameter( name='ANA_STATIC_METHOD_Q', val_def='evenly-spaced', type='str' )
        self.ana_stat_meth_phi = self.declareParameter( name='ANA_STATIC_METHOD_PHI', val_def='evenly-spaced', type='str' )
        self.ana_dyna_meth_q = self.declareParameter( name='ANA_DYNAMIC_METHOD_Q', val_def='evenly-spaced', type='str' )
        self.ana_dyna_meth_phi = self.declareParameter( name='ANA_DYNAMIC_METHOD_PHI', val_def='evenly-spaced', type='str' )
        self.ana_stat_part_q = self.declareParameter( name='ANA_STATIC_PARTITION_Q', val_def='1', type='str' )
        self.ana_stat_part_phi = self.declareParameter( name='ANA_STATIC_PARTITION_PHI', val_def='2', type='str' )
        self.ana_dyna_part_q = self.declareParameter( name='ANA_DYNAMIC_PARTITION_Q', val_def='3', type='str' )
        self.ana_dyna_part_phi = self.declareParameter( name='ANA_DYNAMIC_PARTITION_PHI', val_def='4', type='str' )
        self.ana_mask_type = self.declareParameter( name='ANA_MASK_TYPE', val_def='no-mask', type='str' )
        self.ana_mask_fname = self.declareParameter( name='ANA_MASK_FILE', val_def='./roi-mask.txt', type='str' )
        self.ana_mask_dname = self.declareParameter( name='ANA_MASK_DIRECTORY', val_def='.', type='str' )

        # GUIAnaSettingsRight.py
        self.ana_ndelays = self.declareParameter( name='ANA_NDELAYS_PER_MTAU_LEVEL', val_def=4, type='int' )
        self.ana_nslice_delays = self.declareParameter( name='ANA_NSLICE_DELAYS_PER_MTAU_LEVEL', val_def=4, type='int' )
        self.ana_npix_to_smooth= self.declareParameter( name='ANA_NPIXELS_TO_SMOOTH', val_def=100, type='int' )
        self.ana_smooth_norm = self.declareParameter( name='ANA_SMOOTH_SYM_NORM', val_def=False, type='bool' )
        self.ana_two_corfuns = self.declareParameter( name='ANA_TWO_TIME_CORFUNS_CONTROL', val_def=False, type='bool' )
        self.ana_spec_stab = self.declareParameter( name='ANA_CHECK_SPECKLE_STABILITY', val_def=False, type='bool' )
        self.lld_type = self.declareParameter( name='LOW_LEVEL_DISC_TYPE', val_def='NONE', type='str' )
        self.lld_adu = self.declareParameter( name='LOW_LEVEL_DISC_ADU', val_def=15, type='float' )
        self.lld_rms = self.declareParameter( name='LOW_LEVEL_DISC_RMS', val_def=4, type='float' )
        self.res_ascii_out = self.declareParameter( name='RES_ASCII_OUTPUT', val_def=True, type='bool' )
        self.res_fit1 = self.declareParameter( name='RES_PERFORM_FIT1', val_def=False, type='bool' )
        # NOTE(review): res_fit2 re-declares the name 'RES_PERFORM_FIT1'
        # (presumably meant 'RES_PERFORM_FIT2'), so both attributes map to the
        # same persisted key -- confirm intent before changing.
        self.res_fit2 = self.declareParameter( name='RES_PERFORM_FIT1', val_def=False, type='bool' )
        self.res_fit_cust = self.declareParameter( name='RES_PERFORM_FIT_CUSTOM', val_def=False, type='bool' )
        self.res_png_out = self.declareParameter( name='RES_PNG_FILES', val_def=False, type='bool' )
        self.res_save_log = self.declareParameter( name='RES_SAVE_LOG_FILE', val_def=False, type='bool' )

        # GUILoadResults.py
        self.res_load_mode = self.declareParameter( name='RES_LOAD_MODE', val_def='NONE', type='str' )
        self.res_fname = self.declareParameter( name='RES_LOAD_FNAME', val_def='NONE', type='str' )

        # GUISystemSettingsRight.py
        self.thickness_type = self.declareParameter( name='THICKNESS_TYPE', val_def='NONORM', type='str' )
        self.thickness_sample = self.declareParameter( name='THICKNESS_OF_SAMPLE', val_def=-1, type='float' )
        self.thickness_attlen = self.declareParameter( name='THICKNESS_ATTENUATION_LENGTH', val_def=-2, type='float' )
        self.ccd_orient = self.declareParameter( name='CCD_ORIENTATION', val_def='180', type='str' )
        # NOTE(review): default is the *string* 'True' although the declared
        # type is 'bool'; verify how the base class coerces this value.
        self.y_is_flip = self.declareParameter( name='Y_IS_FLIPPED', val_def='True', type='bool' )

        # GUICCDSettings.py
        self.ccdset_pixsize = self.declareParameter( name='CCD_SETTINGS_PIXEL_SIZE', val_def=0.1, type='float' )
        self.ccdset_adcsatu = self.declareParameter( name='CCD_SETTINGS_ADC_SATTURATION', val_def=12345, type='int' )
        self.ccdset_aduphot = self.declareParameter( name='CCD_SETTINGS_ADU_PER_PHOTON', val_def=123, type='float' )
        self.ccdset_ccdeff = self.declareParameter( name='CCD_SETTINGS_EFFICIENCY', val_def=0.55, type='float' )
        self.ccdset_ccdgain = self.declareParameter( name='CCD_SETTINGS_GAIN', val_def=0.8, type='float' )

        # GUIELogPostingDialog.py
        # GUIELogPostingFields.py
        #self.elog_post_cbx_state = self.declareParameter( name='ELOG_POST_CBX_STATE', val_def=True, type='bool' )
        self.elog_post_rad = self.declareParameter( name='ELOG_POST_RAD_STATE', val_def='Default', type='str' )
        self.elog_post_ins = self.declareParameter( name='ELOG_POST_INSTRUMENT', val_def='AMO', type='str' )
        self.elog_post_exp = self.declareParameter( name='ELOG_POST_EXPERIMENT', val_def='amodaq09', type='str' )
        self.elog_post_run = self.declareParameter( name='ELOG_POST_RUN', val_def='825', type='str' )
        self.elog_post_tag = self.declareParameter( name='ELOG_POST_TAG', val_def='TAG1', type='str' )
        self.elog_post_res = self.declareParameter( name='ELOG_POST_RESPONCE', val_def='None', type='str' )
        self.elog_post_msg = self.declareParameter( name='ELOG_POST_MESSAGE', val_def='EMPTY MSG', type='str' )
        self.elog_post_att = self.declareParameter( name='ELOG_POST_ATTACHED_FILE', val_def='None', type='str' )

        #GUIViewControl.py
        self.vc_cbx_show_more = self.declareParameter( name='SHOW_MORE_BUTTONS', val_def=True, type='bool' )

        #-----------------------------
        # Intensity-monitor tables: one entry per monitor, declared as
        # parallel lists of (value, default, type) tuples.
        imon_names = [ ('BldInfo(FEEGasDetEnergy)', None, 'str'), \
                       ('BldInfo(XCS-IPM-02)', None, 'str'), \
                       ('BldInfo(XCS-IPM-mono)', None, 'str'), \
                       ('DetInfo(XcsBeamline.1:Ipimb.4)', None, 'str'), \
                       ('DetInfo(XcsBeamline.1:Ipimb.5)', None, 'str') ]

        self.imon_name_list = self.declareListOfPars( 'IMON_NAMES', imon_names )
        #-----------------------------
        imon_short_names = [ ('FEEGasDetEnergy', None, 'str'), \
                             ('XCS-IPM-02', None, 'str'), \
                             ('XCS-IPM-mono', None, 'str'), \
                             ('Ipimb.4', None, 'str'), \
                             ('Ipimb.5', None, 'str') ]

        self.imon_short_name_list = self.declareListOfPars( 'IMON_SHORT_NAMES', imon_short_names )
        #-----------------------------
        imon_cbxs = [ (True, True, 'bool'), \
                      (True, True, 'bool'), \
                      (True, True, 'bool'), \
                      (True, True, 'bool'), \
                      (True, True, 'bool') ]

        # deepcopy: each channel list must be an independent set of tuples,
        # not shared references to one template.
        self.imon_ch1_list = self.declareListOfPars( 'IMON_CH1', deepcopy(imon_cbxs) )
        self.imon_ch2_list = self.declareListOfPars( 'IMON_CH2', deepcopy(imon_cbxs) )
        self.imon_ch3_list = self.declareListOfPars( 'IMON_CH3', deepcopy(imon_cbxs) )
        self.imon_ch4_list = self.declareListOfPars( 'IMON_CH4', deepcopy(imon_cbxs) )
        #-----------------------------
        imon_norm_cbx = [ (False, False, 'bool'), \
                          (False, False, 'bool'), \
                          (False, False, 'bool'), \
                          (False, False, 'bool'), \
                          (False, False, 'bool') ]

        self.imon_norm_cbx_list = self.declareListOfPars( 'IMON_NORM_CBX', imon_norm_cbx )
        #-----------------------------
        imon_sele_cbx = [ (False, False, 'bool'), \
                          (False, False, 'bool'), \
                          (False, False, 'bool'), \
                          (False, False, 'bool'), \
                          (False, False, 'bool') ]

        self.imon_sele_cbx_list = self.declareListOfPars( 'IMON_SELE_CBX', imon_sele_cbx )
        #-----------------------------
        imon_sele_min = [ (-1., -1., 'float'), \
                          (-1., -1., 'float'), \
                          (-1., -1., 'float'), \
                          (-1., -1., 'float'), \
                          (-1., -1., 'float') ]

        self.imon_sele_min_list = self.declareListOfPars( 'IMON_SELE_MIN', imon_sele_min )
        #-----------------------------
        imon_sele_max = [ (-1., -1., 'float'), \
                          (-1., -1., 'float'), \
                          (-1., -1., 'float'), \
                          (-1., -1., 'float'), \
                          (-1., -1., 'float') ]

        self.imon_sele_max_list = self.declareListOfPars( 'IMON_SELE_MAX', imon_sele_max )
        #-----------------------------
        # One row per monitor with all of its per-monitor parameters zipped
        # together for convenient iteration in the GUI tables.
        self.imon_pars_list = zip( self.imon_name_list,
                                   self.imon_ch1_list,
                                   self.imon_ch2_list,
                                   self.imon_ch3_list,
                                   self.imon_ch4_list,
                                   self.imon_norm_cbx_list,
                                   self.imon_sele_cbx_list,
                                   self.imon_sele_min_list,
                                   self.imon_sele_max_list,
                                   self.imon_short_name_list )

        #print self.imon_pars_list

    #-----------------------------

    def defineStyles( self ) :
        """Define the Qt style-sheet strings and colors shared by all GUIs."""
        # Base palette of style-sheet fragments.
        self.styleYellowish = "background-color: rgb(255, 255, 220); color: rgb(0, 0, 0);" # Yellowish
        self.stylePink = "background-color: rgb(255, 200, 220); color: rgb(0, 0, 0);" # Pinkish
        self.styleYellowBkg = "background-color: rgb(255, 255, 120); color: rgb(0, 0, 0);" # Yellow bkgd
        self.styleGray = "background-color: rgb(230, 240, 230); color: rgb(0, 0, 0);" # Gray
        self.styleGreenish = "background-color: rgb(100, 255, 200); color: rgb(0, 0, 0);" # Greenish
        self.styleGreenPure = "background-color: rgb(150, 255, 150); color: rgb(0, 0, 0);" # Green
        self.styleBluish = "background-color: rgb(200, 200, 255); color: rgb(0, 0, 0);" # Bluish
        self.styleWhite = "background-color: rgb(255, 255, 255); color: rgb(0, 0, 0);"
        self.styleRedBkgd = "background-color: rgb(255, 0, 0); color: rgb(0, 0, 0);" # Red background
        #self.styleTitle = "background-color: rgb(239, 235, 231, 255); color: rgb(100, 160, 100);" # Gray bkgd
        #self.styleTitle = "color: rgb(150, 160, 100);"
        self.styleBlue = "color: rgb(000, 000, 255);"
        self.styleBuriy = "color: rgb(150, 100, 50);"
        self.styleRed = "color: rgb(255, 0, 0);"
        self.styleGreen = "color: rgb(0, 150, 0);"
        self.styleYellow = "color: rgb(0, 150, 150);"

        # Semantic aliases used by the widgets; change the mapping here
        # rather than the palette above.
        self.styleBkgd = self.styleYellowish
        self.styleTitle = self.styleBuriy
        self.styleLabel = self.styleBlue
        self.styleEdit = self.styleWhite
        self.styleEditInfo = self.styleGreenish
        self.styleEditBad = self.styleRedBkgd
        self.styleButton = self.styleGray
        self.styleButtonOn = self.styleBluish
        self.styleButtonClose = self.stylePink
        self.styleButtonWarning= self.styleYellowBkg
        self.styleButtonGood = self.styleGreenPure
        self.styleButtonBad = self.stylePink
        self.styleBox = self.styleGray
        self.styleCBox = self.styleYellowish
        self.styleStatusGood = self.styleGreen
        self.styleStatusWarning= self.styleYellow
        self.styleStatusAlarm = self.styleRed
        self.styleTitleBold = self.styleTitle + 'font-size: 18pt; font-family: Courier; font-weight: bold;'
        self.styleWhiteFixed = self.styleWhite + 'font-family: Fixed;'

        self.colorEditInfo = QtGui.QColor(100, 255, 200)
        self.colorEditBad = QtGui.QColor(255, 0, 0)
        self.colorEdit = QtGui.QColor('white')

    def printParsDirectly( self ) :
        """Log a few parameters accessed as attributes (usage example).

        NOTE(review): fname_cp/fname_ped/fname_dat are commented out in
        declareCorAnaParameters(), so calling this raises AttributeError --
        presumably why the call in __main__ below is commented out.
        """
        logger.info('Direct use of parameter:' + self.fname_cp .name() + ' ' + self.fname_cp .value(), __name__ )
        logger.info('Direct use of parameter:' + self.fname_ped.name() + ' ' + self.fname_ped.value(), __name__ )
        logger.info('Direct use of parameter:' + self.fname_dat.name() + ' ' + self.fname_dat.value(), __name__ )
#-----------------------------
# Module-level singleton: client GUI modules import `confpars` from here, so
# the configuration file (optionally taken from the command line) is parsed
# exactly once, at import time.
confpars = ConfigParametersCorAna (fname=getConfigFileFromInput())

#-----------------------------
#
# In case someone decides to run this module
#
if __name__ == "__main__" :
    # Self-test: dump all declared parameters, write them back to file,
    # and print one list-of-parameters as a sample.
    confpars.printParameters()
    #confpars.printParsDirectly()
    confpars.saveParametersInFile()
    confpars.printListOfPars('IMON_NAMES')
    sys.exit ( 'End of test for ConfigParametersCorAna' )
#-----------------------------
| [
"[email protected]@b967ad99-d558-0410-b138-e0f6c56caec7"
] | [email protected]@b967ad99-d558-0410-b138-e0f6c56caec7 |
a596a614510d11ede4340b4695cea0deb8bad9bc | 1b7da2f11e509828476e21ca665279602de7b509 | /portal/users/views.py | 9f0eafe0a150452c6d1f3b231cf0664a25cd79c7 | [] | no_license | rosenene/oap | 922f3955e4f3a583e6829eed0d518f2c7f806d58 | 32598b7d6c9d6677c889258f21752878ad30d0a5 | refs/heads/master | 2022-04-26T20:31:37.850145 | 2020-04-16T07:47:19 | 2020-04-16T07:47:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,149 | py | from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import LoginRequiredMixin
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from django.views.generic import DetailView, RedirectView, UpdateView
from django.shortcuts import redirect
User = get_user_model()
# def login_spana(request):
# # return reverse("account_signup")
# return redirect("/university/")
class UserDetailView(LoginRequiredMixin, DetailView):
    """Display a single user's profile page (login required).

    The object is resolved by username taken from the URL instead of the
    DetailView default integer pk.
    """

    model = User
    # Look the user up by the model's "username" field ...
    slug_field = "username"
    # ... using the "username" keyword captured from the URL pattern.
    slug_url_kwarg = "username"


user_detail_view = UserDetailView.as_view()
class UserUpdateView(LoginRequiredMixin, UpdateView):
    """Let the logged-in user edit their own ``name`` field."""

    model = User
    fields = ["name"]

    def get_success_url(self):
        # After saving, return to the editing user's own detail page.
        return reverse("users:detail", kwargs={"username": self.request.user.username})

    def get_object(self):
        # Always edit the requesting user, ignoring any pk/slug in the URL.
        return User.objects.get(username=self.request.user.username)

    def form_valid(self, form):
        # Flash a success message before the normal save/redirect flow.
        messages.add_message(
            self.request, messages.INFO, _("Infos successfully updated")
        )
        return super().form_valid(form)


user_update_view = UserUpdateView.as_view()
class UserRedirectView(LoginRequiredMixin, RedirectView):
    """After login, redirect the user to the landing page for their group."""

    permanent = False

    def get_redirect_url(self):
        print(self.request.user.username)
        # Default role when the user belongs to no known group.
        type_of_applicant = "pt_student"
        # NOTE: checks run sequentially with no elif/return, so for a user in
        # several groups the LAST matching group below wins (main_staff has
        # the highest effective priority).
        if is_member(self.request.user, "pt_student"):
            type_of_applicant = "pt_student"
            # return redirect('cv_registration.views.personalinfo')
        if is_member(self.request.user, "researcher"):
            type_of_applicant = "researcher"
            # return redirect('cv_registration.views.personalinfo')
        if is_member(self.request.user, "mentor"):
            type_of_applicant = "mentor"
            # return redirect('cv_registration.views.personalinfo')
        if is_member(self.request.user, "university_agent"):
            type_of_applicant = "university_agent"
            # return redirect('university_regulator.views.home')
        if is_member(self.request.user, "main_staff"):
            type_of_applicant = "main_staff"
            # return redirect('ega_org.views.home')
        return reverse('users:user_redirect_to_page', kwargs={"applicant_group": type_of_applicant})
        # return reverse("users:detail", kwargs={"username": self.request.user.username})


user_redirect_view = UserRedirectView.as_view()
def user_redirect_to_page(request, **kwargs):
    """Send an authenticated user to the landing URL for their role.

    ``kwargs["applicant_group"]`` is produced by ``UserRedirectView``, which
    emits singular group names ("pt_student", "mentor", "researcher", ...).

    Bug fix: the old code compared against the plural "mentors", which never
    matched the "mentor" value actually passed, so mentors fell through every
    branch and the view returned ``None`` (an error in Django). Both
    spellings are accepted now for backward compatibility.
    """
    applicant_group = kwargs["applicant_group"]
    print(applicant_group)
    # pt_student, mentor and researcher all share the registration page.
    if applicant_group in ("pt_student", "mentor", "mentors", "researcher"):
        return redirect("/info_registration/")
    if applicant_group == "main_staff":
        return redirect("/ega/")
    if applicant_group == "university_agent":
        return redirect("/university/")
    # return redirect("account_logout")
def is_member(user, user_group):
    """Return whether *user* belongs to the auth group named *user_group*."""
    matching_groups = user.groups.filter(name=user_group)
    return matching_groups.exists()
| [
"[email protected]"
] | |
d2e87a92532b10bf361c5d37cdd5fe4f539f0bad | 2ffdb504df70b5bb80c9153b36a44aaa69bc75a5 | /flock/apps.py | 40f89336f10f1540746cf47016db55696992fa53 | [] | no_license | LiamDGray/xerocraft-django | 6101893e1157ae7627e2a9c9bb95e8d6c8423468 | 52bf553a79ffea3f7ad23405cec231dea3c889a9 | refs/heads/master | 2020-03-21T17:07:45.906991 | 2018-06-29T01:24:48 | 2018-06-29T01:24:48 | 138,814,812 | 0 | 0 | null | 2018-06-29T01:18:51 | 2018-06-27T01:43:38 | Python | UTF-8 | Python | false | false | 261 | py | from django.apps import AppConfig
class FlockAppConfig(AppConfig):
    """App config for the ``flock`` app; registers signal handlers on startup."""

    name = 'flock'
    isready = False  # guards against ready() doing its work twice

    def ready(self):
        # Django may invoke ready() more than once; only import (and thereby
        # register) the signal handlers on the first call.
        if self.isready:
            return
        else:
            self.isready = True
            import flock.signals.handlers  # noqa: F401 - import registers handlers
"[email protected]"
] | |
03836c530d5c92497a2237b4a6a42a4ffbe8e4ab | 31f0f37b0c11fddc6a482857215b62d27c36328a | /Fashi/shopkeeper/models.py | 3d08745e93a0c42b5b6a693610aee2d3f1b6a290 | [] | no_license | Priyanshu1012/E-commerce-platform | b25afc1f229788b1375a3bcfaefb01bf167113df | 811251d32f59875e8cfdbaf333938e982c7bcf36 | refs/heads/main | 2023-03-17T10:18:15.433600 | 2021-03-18T04:26:56 | 2021-03-18T04:26:56 | 302,699,084 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,200 | py | from django.db import models
class Items(models.Model):
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=50)
type = models.CharField(max_length=30)
price = models.DecimalField(max_digits=10, decimal_places=2)
image = models.ImageField(upload_to='static/images')
# Create your models here.
class Register(models.Model):
id = models.AutoField(primary_key=True)
first_name = models.CharField(max_length=50)
last_name = models.CharField(max_length=30)
email_address = models.EmailField(max_length=100)
password = models.CharField(max_length=100)
confirm_password = models.CharField(max_length=100)
class Contact(models.Model):
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=50)
email = models.EmailField(max_length=100)
phonenumber = models.IntegerField()
message = models.CharField(max_length=200)
class Cart(models.Model):
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=50)
quantity = models.IntegerField()
price = models.FloatField()
total_price = models.FloatField()
image_cart = models.ImageField(upload_to='static/images')
| [
"[email protected]"
] | |
f6254d023de7431ce2d8903bbd1ccd076fa395cd | 707aee3cf3bf97c3d1145ad588f9bcdcf7075df5 | /week6/linear_regression_with_tensorboard.py | 6ac2ff2077d2c579daf2954b0752f9bf0271b4a5 | [] | no_license | zerogau/dl_cv_tensorflow_10weeks | abce18a738a0dc2b960cda9e911443989da3cae1 | b912a3556ab117e76dd109e951e7bf8c6b1a5c37 | refs/heads/master | 2022-08-27T06:21:03.849544 | 2019-08-28T13:27:50 | 2019-08-28T13:27:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,916 | py | # -*- coding: utf-8 -*-
import tensorflow as tf

# Define the linear regression model (Wx + b).
W = tf.Variable(tf.random_normal([1]), name="W")
b = tf.Variable(tf.random_normal([1]), name="b")
x = tf.placeholder(tf.float32, name="x")
linear_model = W*x + b

# Placeholder for the ground-truth targets.
y = tf.placeholder(tf.float32, name="y")

# Define the loss function.
loss = tf.reduce_sum(tf.square(linear_model - y)) # sum of the squares \sum{(y' - y)^2}

# Register a scalar summary of the loss for TensorBoard.
tf.summary.scalar('loss', loss)

# Define the gradient-descent optimizer used for training.
optimizer = tf.train.GradientDescentOptimizer(0.01)
train = optimizer.minimize(loss)

# Training inputs and expected outputs (the underlying relation is y = 2x).
x_train = [1, 2, 3, 4]
y_train = [2, 4, 6, 8]

# Initialize the parameters (W, b) with values drawn from a normal distribution.
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)

# Merge all TensorBoard summaries into a single op.
merged = tf.summary.merge_all()
# Writer that stores TensorBoard summary logs under './tensorboard_log'.
tensorboard_writer = tf.summary.FileWriter('./tensorboard_log', sess.graph)

# Run 1000 steps of gradient descent.
for i in range(1000):
    sess.run(train, {x: x_train, y: y_train})

    # Evaluate the merged summaries each step and write them to the log dir.
    summary = sess.run(merged, feed_dict={x: x_train, y: y_train})
    tensorboard_writer.add_summary(summary, i)

# Inputs used to check the trained model.
x_test = [3.5, 5, 5.5, 6]
# If the model learned y = 2x well, the expected output is: [7, 10, 11, 12]
print(sess.run(linear_model, feed_dict={x: x_test}))
"[email protected]"
] | |
240135e87c6497ec19f3de5c47566288c7affbd8 | c1f40215bd8e7c2423e56422018de94bb9daaa09 | /venv/lib/python3.7/site-packages/korean_lunar_calendar/__init__.py | cbda134cde1cf4417fbcaa80daceeda4d52fcd27 | [
"MIT"
] | permissive | vchiapaikeo/prophet | e05fc1854fed35387449e6a11f89f03187f344a1 | e8c250ca7bfffc280baa7dabc80a2c2d1f72c6a7 | refs/heads/master | 2022-04-22T04:49:09.716851 | 2020-04-18T15:21:18 | 2020-04-18T15:21:18 | 256,718,973 | 0 | 0 | MIT | 2020-04-18T14:34:53 | 2020-04-18T09:53:39 | Python | UTF-8 | Python | false | false | 115 | py | from .korean_lunar_calendar import KoreanLunarCalendar
# Package version string.
__version__ = '0.2.1'
# Public API of the package.
__all__ = [ 'KoreanLunarCalendar' ]
| [
"[email protected]"
] | |
d3e3b20b1ce012f78bbc61c3eb7dc31075d016ca | c9094a4ed256260bc026514a00f93f0b09a5d60c | /tests/components/accuweather/test_system_health.py | 749f516e44c748caf05503460e8a72ec34d085d3 | [
"Apache-2.0"
] | permissive | turbokongen/home-assistant | 824bc4704906ec0057f3ebd6d92788e096431f56 | 4ab0151fb1cbefb31def23ba850e197da0a5027f | refs/heads/dev | 2023-03-12T05:49:44.508713 | 2021-02-17T14:06:16 | 2021-02-17T14:06:16 | 50,231,140 | 4 | 1 | Apache-2.0 | 2023-02-22T06:14:30 | 2016-01-23T08:55:09 | Python | UTF-8 | Python | false | false | 1,785 | py | """Test AccuWeather system health."""
import asyncio
from unittest.mock import Mock
from aiohttp import ClientError
from homeassistant.components.accuweather.const import COORDINATOR, DOMAIN
from homeassistant.setup import async_setup_component
from tests.common import get_system_health_info
async def test_accuweather_system_health(hass, aioclient_mock):
    """Test AccuWeather system health when the API endpoint is reachable."""
    # Simulate a reachable AccuWeather endpoint.
    aioclient_mock.get("https://dataservice.accuweather.com/", text="")
    hass.config.components.add(DOMAIN)

    assert await async_setup_component(hass, "system_health", {})

    # Minimal hass.data layout the system-health handler reads from.
    hass.data[DOMAIN] = {}
    hass.data[DOMAIN]["0123xyz"] = {}
    hass.data[DOMAIN]["0123xyz"][COORDINATOR] = Mock(
        accuweather=Mock(requests_remaining="42")
    )

    info = await get_system_health_info(hass, DOMAIN)

    # Some health values are produced lazily as coroutines; resolve them.
    for key, val in info.items():
        if asyncio.iscoroutine(val):
            info[key] = await val

    assert info == {
        "can_reach_server": "ok",
        "remaining_requests": "42",
    }
async def test_accuweather_system_health_fail(hass, aioclient_mock):
    """Test AccuWeather system health when the API endpoint is unreachable."""
    # Simulate a network failure when contacting AccuWeather.
    aioclient_mock.get("https://dataservice.accuweather.com/", exc=ClientError)
    hass.config.components.add(DOMAIN)

    assert await async_setup_component(hass, "system_health", {})

    # Minimal hass.data layout the system-health handler reads from.
    hass.data[DOMAIN] = {}
    hass.data[DOMAIN]["0123xyz"] = {}
    hass.data[DOMAIN]["0123xyz"][COORDINATOR] = Mock(
        accuweather=Mock(requests_remaining="0")
    )

    info = await get_system_health_info(hass, DOMAIN)

    # Some health values are produced lazily as coroutines; resolve them.
    for key, val in info.items():
        if asyncio.iscoroutine(val):
            info[key] = await val

    assert info == {
        "can_reach_server": {"type": "failed", "error": "unreachable"},
        "remaining_requests": "0",
    }
| [
"[email protected]"
] | |
2396434007ca4e61f613256865c907cbaf5c7513 | 0c774fe9c419a40212613970e3a3b454e30b0cd5 | /habitat/tasks/nav/nav_task.py | b0737e4d6b2c5107ead755159111044b09f4a9cd | [
"MIT"
] | permissive | hsouporto/habitat-api | 4080136278f67551965e1b4d26ddaeb3efc10a0c | 9c52b6a78786f37d78f540caa8f6ed9d61a2ec6e | refs/heads/master | 2021-10-20T13:14:43.770486 | 2019-02-26T06:56:19 | 2019-02-27T07:22:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,883 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Any, List, Optional, Type
import habitat
import numpy as np
from gym import spaces
from habitat.config import Config
from habitat.core.dataset import Episode, Dataset
from habitat.core.embodied_task import Measurements
from habitat.core.simulator import (
Simulator,
ShortestPathPoint,
SensorTypes,
SensorSuite,
)
from habitat.tasks.utils import quaternion_to_rotation, cartesian_to_polar
def merge_sim_episode_config(sim_config: Any, episode: Type[Episode]) -> Any:
    """Overwrite simulator config fields (scene, agent start pose) from *episode*.

    The config is frozen, so it is defrosted, mutated and re-frozen in place.
    Returns the (mutated) sim_config for convenience.
    """
    sim_config.defrost()
    sim_config.SCENE = episode.scene_id
    sim_config.freeze()
    # Only push the start state when the episode actually specifies one.
    if (
        episode.start_position is not None
        and episode.start_rotation is not None
    ):
        agent_name = sim_config.AGENTS[sim_config.DEFAULT_AGENT_ID]
        agent_cfg = getattr(sim_config, agent_name)
        agent_cfg.defrost()
        agent_cfg.START_POSITION = episode.start_position
        agent_cfg.START_ROTATION = episode.start_rotation
        agent_cfg.IS_SET_START_STATE = True
        agent_cfg.freeze()
    return sim_config
class NavigationGoal:
    """Base class for a goal specification hierarchy.

    Attributes:
        position: goal position as a 3-element (x, y, z) list.
        radius: success radius around the position, or None if unspecified.
    """

    position: List[float]
    radius: Optional[float]

    def __init__(
        self, position: List[float], radius: Optional[float] = None, **kwargs
    ) -> None:
        self.position = position
        self.radius = radius
class ObjectGoal(NavigationGoal):
    """Object goal that can be specified by object_id or position or object
    category.

    Attributes:
        object_id: unique id of the target object instance.
        object_name: human-readable object name, if known.
        object_category: semantic category label, if known.
        room_id: id of the room containing the object, if known.
        room_name: name of that room, if known.
    """

    object_id: str
    object_name: Optional[str]
    object_category: Optional[str]
    room_id: Optional[str]
    room_name: Optional[str]

    def __init__(
        self,
        object_id: str,
        room_id: Optional[str] = None,
        object_name: Optional[str] = None,
        object_category: Optional[str] = None,
        room_name: Optional[str] = None,
        **kwargs
    ) -> None:
        # Position/radius (if any) are forwarded to NavigationGoal.
        super().__init__(**kwargs)
        self.object_id = object_id
        self.object_name = object_name
        self.object_category = object_category
        self.room_id = room_id
        self.room_name = room_name
class RoomGoal(NavigationGoal):
    """Room goal that can be specified by room_id or position with radius.

    Attributes:
        room_id: id of the target room.
        room_name: human-readable room name, if known.
    """

    room_id: str
    room_name: Optional[str]

    def __init__(
        self, room_id: str, room_name: Optional[str] = None, **kwargs
    ) -> None:
        # Position/radius (if any) are forwarded to NavigationGoal.
        super().__init__(**kwargs)  # type: ignore
        self.room_id = room_id
        self.room_name = room_name
class NavigationEpisode(Episode):
    """Class for episode specification that includes initial position and
    rotation of agent, scene name, goal and optional shortest paths. An
    episode is a description of one task instance for the agent.

    Args:
        episode_id: id of episode in the dataset, usually episode number
        scene_id: id of scene in scene dataset
        start_position: numpy ndarray containing 3 entries for (x, y, z)
        start_rotation: numpy ndarray with 4 entries for (x, y, z, w)
            elements of unit quaternion (versor) representing agent 3D
            orientation. ref: https://en.wikipedia.org/wiki/Versor
        goals: list of goals specifications
        start_room: room id
        shortest_paths: list containing shortest paths to goals
    """

    goals: List[NavigationGoal]
    start_room: Optional[str]
    shortest_paths: Optional[List[ShortestPathPoint]]

    def __init__(
        self,
        goals: List[NavigationGoal],
        start_room: Optional[str] = None,
        shortest_paths: Optional[List[ShortestPathPoint]] = None,
        **kwargs
    ) -> None:
        # Base Episode fields (episode_id, scene_id, start pose, ...) are
        # consumed by Episode via **kwargs.
        super().__init__(**kwargs)
        self.goals = goals
        self.shortest_paths = shortest_paths
        self.start_room = start_room
class PointGoalSensor(habitat.Sensor):
    """
    Sensor for PointGoal observations which are used in the PointNav task.
    For the agent in simulator the forward direction is along negative-z.
    In polar coordinate format the angle returned is azimuth to the goal.

    Args:
        sim: reference to the simulator for calculating task observations.
        config: config for the PointGoal sensor. Can contain field for
            GOAL_FORMAT which can be used to specify the format in which
            the pointgoal is specified. Current options for goal format are
            cartesian and polar.

    Attributes:
        _goal_format: format for specifying the goal which can be done
            in cartesian or polar coordinates.
    """

    def __init__(self, sim, config):
        self._sim = sim

        # Goal may be reported as cartesian (x, y, z) or polar (rho, phi).
        self._goal_format = getattr(config, "GOAL_FORMAT", "CARTESIAN")
        assert self._goal_format in ["CARTESIAN", "POLAR"]

        super().__init__(config=config)

    def _get_uuid(self, *args: Any, **kwargs: Any):
        return "pointgoal"

    def _get_sensor_type(self, *args: Any, **kwargs: Any):
        return SensorTypes.PATH

    def _get_observation_space(self, *args: Any, **kwargs: Any):
        # 3 components for cartesian, 2 (rho, phi) for polar.
        if self._goal_format == "CARTESIAN":
            sensor_shape = (3,)
        else:
            sensor_shape = (2,)

        return spaces.Box(
            low=np.finfo(np.float32).min,
            high=np.finfo(np.float32).max,
            shape=sensor_shape,
            dtype=np.float32,
        )

    def get_observation(self, observations, episode):
        agent_state = self._sim.get_agent_state()
        ref_position = agent_state.position
        ref_rotation = agent_state.rotation

        # Vector from the agent to the first goal, in world coordinates.
        direction_vector = (
            np.array(episode.goals[0].position, dtype=np.float32)
            - ref_position
        )
        # agent rotation is stored (x, y, z, w); quaternion_to_rotation takes
        # (w, x, y, z), hence the reordering below.
        rotation_world_agent = quaternion_to_rotation(
            ref_rotation[3], ref_rotation[0], ref_rotation[1], ref_rotation[2]
        )
        # Rotate the goal vector into the agent frame (inverse via transpose).
        direction_vector_agent = np.dot(
            rotation_world_agent.T, direction_vector
        )

        if self._goal_format == "POLAR":
            # Forward is -z in the agent frame; phi is the azimuth to the goal.
            rho, phi = cartesian_to_polar(
                -direction_vector_agent[2], direction_vector_agent[0]
            )
            direction_vector_agent = np.array([rho, -phi], dtype=np.float32)

        return direction_vector_agent
class SPL(habitat.Measure):
    """SPL (Success weighted by Path Length)

    ref: On Evaluation of Embodied Agents - Anderson et. al
    https://arxiv.org/pdf/1807.06757.pdf
    """

    def __init__(self, sim, config):
        self._previous_position = None           # agent position at the previous step
        self._start_end_episode_distance = None  # geodesic shortest-path length
        self._agent_episode_distance = None      # cumulative distance actually walked
        self._sim = sim
        self._config = config

        super().__init__()

    def _get_uuid(self, *args: Any, **kwargs: Any):
        return "spl"

    def reset_metric(self, episode):
        self._previous_position = self._sim.get_agent_state().position.tolist()
        # Geodesic distance is precomputed and stored on the episode.
        self._start_end_episode_distance = episode.info["geodesic_distance"]
        self._agent_episode_distance = 0.0
        self._metric = None

    def _euclidean_distance(self, position_a, position_b):
        return np.linalg.norm(
            np.array(position_b) - np.array(position_a), ord=2
        )

    def update_metric(self, episode, action):
        ep_success = 0
        current_position = self._sim.get_agent_state().position.tolist()

        # Success = agent called STOP within SUCCESS_DISTANCE of the goal.
        if (
            action == self._sim.index_stop_action
            and self._euclidean_distance(
                current_position, episode.goals[0].position
            )
            < self._config.SUCCESS_DISTANCE
        ):
            ep_success = 1

        # Accumulate the path length walked so far.
        self._agent_episode_distance += self._euclidean_distance(
            current_position, self._previous_position
        )
        self._previous_position = current_position

        # SPL = success * shortest / max(shortest, walked).
        self._metric = ep_success * (
            self._start_end_episode_distance
            / max(
                self._start_end_episode_distance, self._agent_episode_distance
            )
        )
class NavigationTask(habitat.EmbodiedTask):
    """Navigation task that builds its measures and sensors from the config.

    Measurement and sensor classes are looked up BY NAME on this module
    (habitat.tasks.nav.nav_task), so each config entry's TYPE string must
    match a class defined here (e.g. "SPL", "PointGoalSensor").
    """

    def __init__(
        self,
        task_config: Config,
        sim: Simulator,
        dataset: Optional[Dataset] = None,
    ) -> None:
        # Instantiate every measurement listed in the task config.
        task_measurements = []
        for measurement_name in task_config.MEASUREMENTS:
            measurement_cfg = getattr(task_config, measurement_name)
            is_valid_measurement = hasattr(
                habitat.tasks.nav.nav_task,  # type: ignore
                measurement_cfg.TYPE,
            )
            assert is_valid_measurement, "invalid measurement type {}".format(
                measurement_cfg.TYPE
            )
            task_measurements.append(
                getattr(
                    habitat.tasks.nav.nav_task,  # type: ignore
                    measurement_cfg.TYPE,
                )(sim, measurement_cfg)
            )
        self.measurements = Measurements(task_measurements)

        # Instantiate every sensor listed in the task config.
        task_sensors = []
        for sensor_name in task_config.SENSORS:
            sensor_cfg = getattr(task_config, sensor_name)
            is_valid_sensor = hasattr(
                habitat.tasks.nav.nav_task, sensor_cfg.TYPE  # type: ignore
            )
            assert is_valid_sensor, "invalid sensor type {}".format(
                sensor_cfg.TYPE
            )
            task_sensors.append(
                getattr(
                    habitat.tasks.nav.nav_task, sensor_cfg.TYPE  # type: ignore
                )(sim, sensor_cfg)
            )

        self.sensor_suite = SensorSuite(task_sensors)

        super().__init__(config=task_config, sim=sim, dataset=dataset)

    def overwrite_sim_config(
        self, sim_config: Any, episode: Type[Episode]
    ) -> Any:
        # Called by the env to push per-episode scene/start state into the sim.
        return merge_sim_episode_config(sim_config, episode)
| [
"[email protected]"
] | |
1b32ea37e4c7f6126f63d235f5bc196330d2dc7e | d94b6845aeeb412aac6850b70e22628bc84d1d6d | /dimensions_of_motion/geometry.py | d7a317cb08a95e69785f8cd0af032ae5db8a1f29 | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | ishine/google-research | 541aea114a68ced68736340e037fc0f8257d1ea2 | c1ae273841592fce4c993bf35cdd0a6424e73da4 | refs/heads/master | 2023-06-08T23:02:25.502203 | 2023-05-31T01:00:56 | 2023-05-31T01:06:45 | 242,478,569 | 0 | 0 | Apache-2.0 | 2020-06-23T01:55:11 | 2020-02-23T07:59:42 | Jupyter Notebook | UTF-8 | Python | false | false | 7,466 | py | # coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
"""Functions for sampling and warping images.
We use texture coordinates to represent points and offsets in images. They go
from (0,0) in the top-left corner of an image to (1,1) in the bottom right. It
is convenient to work with these coordinates rather than counts of pixels,
because they are resolution-independent.
"""
import tensorflow as tf
import tensorflow_addons as tfa
import utils
def check_input_shape(name, tensor, axis, value):
  """Raise ValueError unless dimension `axis` of `tensor` has size `value`.

  Args:
    name: input name used in the error message.
    tensor: tensor-like object whose `.shape.as_list()` is inspected.
    axis: index into the shape list (may be negative).
    value: required size of that dimension.

  Raises:
    ValueError: if the dimension does not match.
  """
  dims = tensor.shape.as_list()
  if dims[axis] == value:
    return
  raise ValueError('Input "%s": dimension %d should be %s. Shape = %s' %
                   (name, axis, value, dims))
def pixel_center_grid(height, width):
  """Produce a grid of (x,y) texture-coordinate pairs of pixel centers.

  Args:
    height: (integer) height, not a tensor
    width: (integer) width, not a tensor

  Returns:
    A tensor of shape [height, width, 2] where each entry gives the (x,y)
    texture coordinates of the corresponding pixel center. For example, for
    pixel_center_grid(2, 3) the result is:
       [[[1/6, 1/4], [3/6, 1/4], [5/6, 1/4]],
        [[1/6, 3/4], [3/6, 3/4], [5/6, 3/4]]]
  """
  height_float = tf.cast(height, dtype=tf.float32)
  width_float = tf.cast(width, dtype=tf.float32)
  # Centers of the first and last pixel rows/columns, in texture coordinates.
  ys = tf.linspace(0.5 / height_float, 1.0 - 0.5 / height_float, height)
  xs = tf.linspace(0.5 / width_float, 1.0 - 0.5 / width_float, width)
  # meshgrid makes xs vary along the width axis and ys along the height axis.
  xs, ys = tf.meshgrid(xs, ys)
  grid = tf.stack([xs, ys], axis=-1)
  assert grid.shape.as_list() == [height, width, 2]
  return grid
def sample_image(image, coords):
  """Sample points from an image, using bilinear filtering.

  Args:
    image: [B0, ..., Bn-1, height, width, channels] image data
    coords: [B0, ..., Bn-1, ..., 2] (x,y) texture coordinates

  Returns:
    [B0, ..., Bn-1, ..., channels] image data, in which each value is sampled
    with bilinear interpolation from the image at position indicated by the
    (x,y) texture coordinates. The image and coords parameters must have
    matching batch dimensions B0, ..., Bn-1.

  Raises:
    ValueError: if shapes are incompatible.
  """
  check_input_shape('coords', coords, -1, 2)
  tfshape = tf.shape(image)[-3:-1]
  height = tf.cast(tfshape[0], dtype=tf.float32)
  width = tf.cast(tfshape[1], dtype=tf.float32)
  # Resampler expects coordinates where (0,0) is the center of the top-left
  # pixel and (width-1, height-1) is the center of the bottom-right pixel,
  # so convert from texture coordinates to pixel coordinates.
  pixel_coords = coords * [width, height] - 0.5

  # tfa.image.resampler only works with exactly one batch dimension, i.e. it
  # expects image to be [batch, height, width, channels] and pixel_coords to be
  # [batch, ..., 2]. So we need to reshape, perform the resampling, and then
  # reshape back to what we had.
  batch_dims = len(image.shape.as_list()) - 3
  assert (image.shape.as_list()[:batch_dims] == pixel_coords.shape.as_list()
          [:batch_dims])

  batched_image, _ = utils.flatten_batch(image, batch_dims)
  batched_coords, unflatten_coords = utils.flatten_batch(
      pixel_coords, batch_dims)
  resampled = tfa.image.resampler(batched_image, batched_coords)

  # Convert back to the right shape to return
  resampled = unflatten_coords(resampled)
  return resampled
def bilinear_forward_warp(image, coords, weights=None):
  """Forward warp each point in an image using bilinear filtering.

  This is a sort of reverse of sample_image, in the sense that scatter is the
  reverse of gather. A new image is generated of the same size as the input, in
  which each pixel has been splatted onto the 2x2 block containing the
  corresponding coordinates, using bilinear weights (multiplied with the input
  per-pixel weights, if supplied). Thus if two or more pixels warp to the same
  point, the result will be a blend of the their values. If no pixels warp to a
  location, the result at that location will be zero.

  Args:
    image: [B0, ..., Bn-1, height, width, channels] image data
    coords: [B0, ..., Bn-1, height, width, 2] (x,y) texture coordinates
    weights: [B0, ... ,Bn-1, height, width] weights for each point. If omitted,
      all points are weighed equally. Use this to implement, for example, soft
      z-buffering.

  Returns:
    [B0, ..., Bn-1, ..., channels] image data, in which each point in the
    input image has been moved to the position indicated by the corresponding
    (x,y) texture coordinates. The image and coords parameters must have
    matching batch dimensions B0, ..., Bn-1.
  """
  # Forward-warp computed using the gradient of reverse-warp. We use a dummy
  # image of the right size for reverse-warping. An extra channel is used to
  # accumulate the total weight for each pixel which we'll then divide by.
  image_and_ones = tf.concat([image, tf.ones_like(image[Ellipsis, -1:])], axis=-1)
  dummy = tf.zeros_like(image_and_ones)
  if weights is None:
    weighted_image = image_and_ones
  else:
    weighted_image = image_and_ones * weights[Ellipsis, tf.newaxis]

  with tf.GradientTape(watch_accessed_variables=False) as g:
    g.watch(dummy)
    reverse = tf.reduce_sum(
        sample_image(dummy, coords) * weighted_image, [-3, -2])
  # The gradient w.r.t. the dummy image is exactly the bilinear splat of
  # weighted_image at the given coordinates.
  grads = g.gradient(reverse, dummy)
  rgb = grads[Ellipsis, :-1]
  total = grads[Ellipsis, -1:]
  # Normalize by the accumulated weight; zero where nothing was splatted.
  result = tf.math.divide_no_nan(rgb, total)
  return result
def flow_warp(image, flow):
  """Warp images by resampling according to flow vectors.

  Args:
    image: [..., H, W, C] images
    flow: [..., H, W, 2] (x, y) texture offsets

  Returns:
    [..., H, W, C] resampled images. Each pixel in each output image has been
    bilinearly sampled from the corresponding pixel in its input image plus
    the (x, y) flow vector. The flow vectors are texture coordinate offsets,
    e.g. (1, 1) is an offset of the whole width and height of the image.
    Sampling outside the image yields zero values.
  """
  height, width = image.shape.as_list()[-3:-1]
  # Offset each pixel center by its flow vector, then gather bilinearly.
  sample_coords = pixel_center_grid(height, width) + flow
  return sample_image(image, sample_coords)
def flow_forward_warp(image, flow):
  """Forward-warp images according to flow vectors.

  Args:
    image: [..., H, W, C] images
    flow: [..., H, W, 2] (x, y) texture offsets

  Returns:
    [..., H, W, C] warped images. Each pixel in each image is offset according
    to the corresponding value in the flow, and splatted onto a 2x2 pixel block.
    (See bilinear_forward_warp for details.) If no points warp to a location,
    the result will be zero. The flow vectors are texture coordinate offsets,
    e.g. (1, 1) is an offset of the whole width and height of the image.
  """
  height, width = image.shape.as_list()[-3:-1]
  # Offset each pixel center by its flow vector, then scatter bilinearly.
  splat_coords = pixel_center_grid(height, width) + flow
  return bilinear_forward_warp(image, splat_coords)
| [
"[email protected]"
] | |
8320743766681fb6216634bf66a0136b8a9e19cf | 9fe646e011269454ef6219fb7d87159df02b8555 | /strings/anagram.py | 7f462b75c5c58a54cdeac7dc405397e4f2805774 | [] | no_license | shreyas-selvaraj/geeks_for_geeks_problems | 34fe9b01cebf6be35bd3d3ba9802478ff256c928 | a3d244494df86ba26bad0576469dc936ea027100 | refs/heads/master | 2022-11-28T22:01:08.851558 | 2020-07-29T11:59:15 | 2020-07-29T11:59:15 | 283,486,919 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 545 | py | #add characters for first into hashtable
#go through second string, then either delete from second string or decrement hashtable
def isAnagram(str1, str2):
    """Return "YES" if str1 and str2 are anagrams of each other, else "NO".

    Replaces the hand-rolled dict bookkeeping with collections.Counter:
    two strings are anagrams exactly when their character-frequency
    multisets are equal. Behavior (including empty strings) is unchanged.
    """
    # Local import: this script has no top-level import section.
    from collections import Counter
    return "YES" if Counter(str1) == Counter(str2) else "NO"
# Read T test cases, each a pair of space-separated strings, and collect
# the YES/NO verdict for each pair.
T = int(input())
ans = []
for i in range(T):
    strings = input().split(" ")
    ans.append(isAnagram(strings[0], strings[1]))
# Print all verdicts after the input loop.
for a in ans:
    print(a)
"[email protected]"
] | |
7642072e77aebda4174a74cfe093db22e6377af7 | 7bd0954e956993df19d833810f9d71b60e2ebb9a | /phasor/utilities/ipynb/hdf.py | b9f7e5b1add89064ffd726859cfe27d4415619ec | [
"Apache-2.0"
] | permissive | aa158/phasor | 5ee0cec4f816b88b0a8ac298c330ed48458ec3f2 | fe86dc6dec3740d4b6be6b88d8eef8566e2aa78d | refs/heads/master | 2021-10-22T09:48:18.556091 | 2019-03-09T18:56:05 | 2019-03-09T18:56:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 169 | py | # -*- coding: utf-8 -*-
"""
"""
from __future__ import division, print_function, unicode_literals
import h5py
from declarative.bunch.hdf_deep_bunch import HDFDeepBunch
| [
"[email protected]"
] | |
0f1635b44d0e45fa5a033f4ffd633e66ef241110 | 2e8a05666d97c059500258fce9d88c59b9d55e40 | /DSA-II/Lab1 - Intro Python/q4.py | 73d95365f86221ab0c5a279cc498c7b9927c840f | [] | no_license | aksh555/IT-Labs | b1d3128a0e685389ac0bf6fe522af77f8187da66 | f14e8f94e5fa9a1083e00d2b66bf48bb9596d051 | refs/heads/main | 2023-07-10T03:01:58.244079 | 2021-08-09T11:16:11 | 2021-08-09T11:16:11 | 317,137,908 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 480 | py | def bub(lis):
    # Bubble sort: each outer pass bubbles the largest remaining element to
    # the end of the unsorted prefix; sorts `lis` in place (ascending).
    n = len(lis)
    for i in range(n):
        for j in range(n-i-1):
            if(lis[j] > lis[j+1]):
                lis[j],lis[j+1] = lis[j+1],lis[j]
    # Print the now-sorted list (the function returns None).
    print(lis)
def sel(lis):
    """Selection-style sort of ``lis`` in place (ascending); prints the result."""
    count = len(lis)
    for front in range(count):
        # Swap any later element that is smaller than the current front slot,
        # so the minimum of the suffix ends up at position `front`.
        for other in range(front + 1, count):
            if lis[front] > lis[other]:
                lis[front], lis[other] = lis[other], lis[front]
    print(lis)
def main():
    """Read n then n integers, and show the list sorted by both algorithms."""
    n = int(input())
    lis = [int(input()) for i in range(n)]
    print("Original List: " + str(lis))
    print("Bubble Sort")
    bub(lis)
    # NOTE: bub() sorts in place, so sel() below receives an already-sorted list.
    print("Selection Sort")
    sel(lis)
main()
| [
"[email protected]"
] | |
267f5e570bff6ec85a0e60de98259cea7422da0e | edb37da2fd2d2f048df119db96a6de58fc816ddb | /jumpserver-0.4/zrd/my_blog/article/views.py | 0634c5361e1cf968ac0e81b87ea55908e18fa6b5 | [] | no_license | cucy/2017 | 88f1aa2e8df945162d8259918cf61a138a3422cf | 33bcdd5c9e0717521544e3ea41ade10fbb325c4f | refs/heads/master | 2020-05-21T15:31:39.935733 | 2017-07-10T11:04:29 | 2017-07-10T11:04:29 | 84,629,639 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,766 | py | # coding:utf-8
from django.shortcuts import render
from django.shortcuts import render_to_response
# Create your views here.
from django.http import HttpResponse
from models import SSHInfo
# Create your views here.
try:
from ConfigParser import ConfigParser
except:
from configparser import ConfigParser
try:
import paramiko_client
except:
from . import paramiko_client
def home(request):
    """Handle SSH-config uploads and show the stored host list.

    Any uploaded file is parsed as an INI config whose sections each
    describe one SSH host; entries are upserted into SSHInfo. Renders the
    host list when any hosts exist, otherwise the upload page.
    """
    # If the request carries uploaded file(s)...
    for key in request.FILES:
        file = request.FILES[key]
        config = ConfigParser()  # parse the uploaded INI config file
        config.readfp(file)
        for section in config.sections():
            print(section)
            host_name = config.get(section, 'host_name')
            host = config.get(section, 'host')
            port = config.get(section, 'port')
            usr = config.get(section, 'username')
            pwd = config.get(section, 'password')
            # NOTE(review): SSH passwords are persisted in plain text here —
            # consider encrypted storage or key-based auth.
            new_ssh, create = SSHInfo.objects.update_or_create(
                host_name=host_name
                , host=host
                , port=port
                , usr=usr
                , pwd=pwd
            )
            new_ssh.save()  # persist the host entry to the database
    sshs = SSHInfo.objects.all()  # all stored hosts
    if len(sshs) > 0:
        return render_to_response('sshlist.html', {'sshs': sshs})
    else:
        return render_to_response('home_view.html')
def run_ssh_cmd(requset):
    """Run `date` on every stored SSH host and render the collected output.

    NOTE(review): parameter name "requset" is a typo for "request"; kept
    as-is to avoid changing the view signature.
    """
    # Fetch every stored host entry.
    sshs = SSHInfo.objects.all()
    cmd_res = {}
    for ssh in sshs:
        client = paramiko_client.ParamikoClient()
        client.connect(ssh)
        res = client.run_cmd('date')  # execute the command and collect its output
        cmd_res[ssh.host_name] = res
    return render_to_response('cmd_res.html', {'cmd_res': cmd_res})
| [
"[email protected]"
] | |
279593dcda5a4a08ad35a32f2ac454368e4ba63e | 364b764562715c22220e7c4bfdd1b1b090460c42 | /BackEnd/main3.py | e1e76d0e0c26a0a8c2d4eb267559b8c1e53544e1 | [] | no_license | AadSah/bona-fide | a36eaaf72a71d0cefb0ccf8e5ee6a4c7c369a632 | c07ad0b24ce8b9c84feeda93384a64572f816e08 | refs/heads/master | 2020-04-27T18:27:39.065598 | 2019-09-18T14:54:33 | 2019-09-18T14:54:33 | 171,000,162 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,789 | py | #plag-check using first 3 urls...accurate but slow a bit...
import getURL3       # finds candidate source URLs for a line of text
import webSearch     # fetches a URL's page content for comparison
import comparefuzzy  # fuzzy (Levenshtein-style) similarity check
content = open("MyTestFile.txt","r+") # the user's text to be checked
matched = open("matchedSources.txt","w+") # receives the matched source URLs
highlight = open("highlightedText.txt","w+") # receives text with plagiarised lines marked
linecount=0 # number of non-blank lines processed
plagper=0 # accumulated plagiarism percentage
maxurl=None # URL with the best match for the current line
for contentline in content: # each line of the user's text
    if(contentline!="\n"): # skip blank lines
        linecount+=1
        URLsToCheck = [None]*3 # up to 3 candidate URLs per line
        URLsToCheck = getURL3.URLFinder(contentline)
        maxval=0 # best similarity score seen for this line
        for j in range(3): # try each of the 3 candidate URLs
            if(URLsToCheck[j]!=None):
                webSearch.searchResults(URLsToCheck[j])
                tempval = comparefuzzy.check(contentline)
                if(tempval>maxval):
                    maxval = tempval
                    maxurl = URLsToCheck[j]
                tempval = 0
                if(maxval>85): # good enough match -- stop early
                    break
        if(maxval>85): # 85% similarity threshold counts the line as plagiarised
            plagper += 100 # plagiarised line contributes 100%
            matched.write("Line-"+str(linecount)+"::"+maxurl+"\n") # record the matched source
            # highlight.write("<font color=\"red\"><b>"+contentline+"</b></font>\n") # (old HTML highlighting)
            highlight.write(contentline.upper() + "{" + maxurl + "}\n") # mark plagiarised line + source
        else:
            plagper += maxval # partial-match lines contribute their best score
            highlight.write(contentline) # written unchanged (not plagiarised)
plagper /= linecount # average over all processed lines
uniper = 100 - plagper
print("{:.2f}".format(plagper)+"%") # plagiarised percentage
print("{:.2f}".format(uniper)+"%") # unique percentage
# Close the output streams (content is closed on the following line).
highlight.close()
matched.close()
content.close() | [
"[email protected]"
] | |
7b210b9bf6bb67cf1658eacd862c0112692debfb | 71ffa1fbccd5a7066a2523f2b4d15ead2fbd5238 | /Lab07/Bai07/hello.py | 9d01f07e04fc999368caf0fdc1bd2a143e7e0c8b | [] | no_license | TanLinh15/LTPythonNC | 98bab968f1fa782540e2c344c6d8c9932491475d | 890c756a1466262d294a5a21882a8fc0fb187607 | refs/heads/main | 2023-09-03T10:52:33.447781 | 2021-11-10T02:09:20 | 2021-11-10T02:09:20 | 406,348,063 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 239 | py | from flask import render_template, Flask
app = Flask(__name__)
# Both routes map to the same view; the <name> URL segment is optional.
@app.route('/hello')
@app.route('/hello/<name>')
def index(name=None):
    """Render hello.html, passing along *name* when the URL supplies one."""
    return render_template('hello.html', name=name)
if __name__ == '__main__':
app.run(port=5000) | [
"[email protected]"
] | |
abc2e14c55f8110ca3d0bc1403c2b44d4e5fe36e | 026fee65b95206995baf1565f486ab4ed7f7cef9 | /userprofiles/admin.py | 89683d76fdacc00428bfbad69cc1e019d3f01b5e | [] | no_license | santhoshpkumar/pinclone | e8460aab355ebf3e5559d44127d7ccad22667747 | 8bf641df9a4999797731d1d2fb4ff3d78d717e10 | refs/heads/master | 2020-04-03T09:39:27.269726 | 2018-10-08T10:51:51 | 2018-10-08T10:51:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | py | from django.contrib import admin
from .models import Profile
# Register your models here.
@admin.register(Profile)
class ProfileAdmin(admin.ModelAdmin):
    """Admin configuration for Profile objects."""
    # Columns shown on the admin changelist page.
    list_display = ('user', 'bio', 'website', 'birth_date')
| [
"[email protected]"
] | |
f1cf6c44e0320547db577c64e785fe3b377a1617 | 4d35236510781f6ebf1171383617771be460dc2c | /airModelWeb.py | ae802a92308b6c42be326cc36f4dc0da060f5a73 | [] | no_license | jhwujialing/guizhouemergencymodels | 3b9f81a645d6e587659280fd65828d6d65220502 | 49efe6e82c19a9f5957fe24a6ad373fae8d1a7e1 | refs/heads/master | 2022-10-03T21:24:34.044754 | 2020-06-09T03:06:11 | 2020-06-09T03:06:11 | 270,889,961 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,572 | py | from flask import Flask
from flask import request
import json
import numpy as np
from airModels import ContinuousModel
from concurrent.futures import ThreadPoolExecutor
import math
import requests
import time
executor = ThreadPoolExecutor(1)
app = Flask(__name__)
@app.route('/continuousModel', methods=['POST'])
def continuousModel():
    """Accept a continuous-release dispersion job via POST and queue it.

    Required JSON fields: q, u, tl, angle, id, lg, lt, kssc.
    Optional fields: height (default 0), airStability (default 'D'),
    roughness (default 0.1).  The heavy simulation runs on the background
    executor; the response only acknowledges that the job was queued.
    """
    data = json.loads(request.get_data())
    Q = data['q']            # source strength
    u = data['u']            # wind speed
    tl = data['tl']          # release duration
    angle = math.radians(data['angle'])  # wind direction: degrees -> radians
    identity = data['id']    # job id echoed back to the receiver service
    lg = data['lg']          # source longitude
    lt = data['lt']          # source latitude
    # Optional parameters: dict.get with a default instead of the previous
    # bare `except:` blocks, so unrelated errors are no longer swallowed.
    H = data.get('height', 0)
    stability = data.get('airStability', 'D')
    roughness = data.get('roughness', 0.1)
    total_time = data['kssc']  # total simulation horizon
    executor.submit(continuousModelBackground, Q, u, tl, angle, identity,
                    lg, lt, H, stability, roughness, total_time)
    return json.dumps(1)
def continuousModelBackground(Q,u,tl,angle,identity,lg,lt,H,stability,roughness,total_time):
    """Run the continuous-release dispersion simulation and upload the result.

    Samples concentrations on a grid at 24 time steps spread over
    total_time, keeps every point whose concentration is >= 1, tracks the
    per-step peak value, and POSTs everything as JSON to the receiver
    service.  Q: source strength, u: wind speed, tl: release duration,
    angle: wind direction in radians, H: release height, lg/lt: source
    longitude/latitude.
    """
    air = ContinuousModel(Q,u,H,stability,roughness,tl)
    # 24 evenly spaced sampling times across the requested horizon.
    time_range = (int(total_time/24))
    #time_range = (int(total_time / 2))
    list_t = [time_range*i for i in range(1,25)]
    #list_t = [time_range * i for i in range(1, 3)]
    # NOTE(review): this list_z is never used - it is rebuilt inside the loops.
    list_z = [H+j for j in range(-5,20,5)]
    val_data = []
    center = []
    # Split sampling times into "still releasing" (<= tl) and "after release".
    list_t_0 = [t for t in list_t if t<=tl]
    list_t_1 = [t for t in list_t if t>tl]
    # While releasing, the plume centre stays at the source.
    for t in list_t_0:
        center.append([lg, lt])
    # After the release stops, the centre drifts downwind.
    for t in list_t_1:
        x = u*t + u*tl/4
        y = 0
        # Rotate the downwind offset into geographic axes.
        x_ = x * math.cos(angle) - y * math.sin(angle)
        y_ = x * math.sin(angle) + y * math.cos(angle)
        # Metres to degrees; presumably ~0.0009 deg lon / 0.0008 deg lat
        # per 100 m at this latitude - TODO confirm the constants.
        x2j = lg + x_/100*0.0009
        y2w = lt + y_/100*0.0008
        center.append([x2j, y2w])
    the_peak_array = []
    # Elevated release: sample several heights around H as well.
    if H !=0:
        # the_peak starts as '' so the first step is never skipped; once a
        # step ends with peak 0 (no concentration >= 1 anywhere), every
        # later step is short-circuited with an empty record.
        the_peak = ''
        for t in list_t_0:
            if the_peak == 0:
                the_peak_json = {'the_peak': the_peak, 'time': t}
                the_peak_array.append(the_peak_json)
                continue
            # print(t)
            # Grid resolution: denser near the source, coarser further out.
            if u>=3:
                list_x = list(range(0, int(u * t/2),(int(u * t/2/400) if int(u * t/2/400)>0 else 1)))+list(range(int(u * t/2), int(u * t*3/2), int(u * t*3/200) if int(u * t*3/200)>0 else 1))
                list_y = list_x
            else:
                list_x = list(range(0, int(u * t/2), int(u * t/2/200) if int(u * t/2/200)>0 else 1))+list(range(int(u * t/2), int(u * t*3/2), int(u * t*3/200) if int(u * t*3/200)>0 else 1))
                list_y = list_x
            # Heights sampled around H, clamped at ground level (0).
            list_z = [H + j if H + j > 0 else 0 for j in range(-10, 10, 5)]
            list_z = set(list_z)
            the_peak = 0
            for x in list_x:
                for y in list_y:
                    # Rotate grid coordinates back into the wind-aligned frame.
                    x_b = x * math.cos(-angle) - y * math.sin(-angle)
                    if x_b < 1:
                        continue
                    y_b = x * math.sin(-angle) + y * math.cos(-angle)
                    for z_b in list_z:
                        res = air.getNd(x_b,y_b,z_b,t)
                        if res >= 1:
                            #print(t, x, y, res)
                            x2j = lg + x/100*0.0009
                            y2w = lt + y/100*0.0008
                            val_data.append([int(t), x2j, y2w, round(res,2)])
                            if round(res,2)>the_peak:
                                the_peak = round(res,2)
                            else:
                                the_peak = the_peak
            the_peak_json = {'the_peak':the_peak,'time':t}
            the_peak_array.append(the_peak_json)
        the_peak = ''
        for t in list_t_1:
            # print(t)
            if the_peak == 0:
                the_peak_json = {'the_peak': the_peak, 'time': t}
                the_peak_array.append(the_peak_json)
                continue
            list_x = list(range(0, int((u*t+u*tl/2)/3),int((u*t+u*tl/2)/3/100) if int((u*t+u*tl/2)/3/100)>0 else 1))+list(range(int((u*t+u*tl/2)/3), int(u*t+u*tl/2),int((u*t+u*tl/2)/100) if int((u*t+u*tl/2)/100) >0 else 1))
            list_y = list_x
            list_z = [H + j if H + j > 0 else 0 for j in range(-10, 10, 5)]
            list_z = set(list_z)
            the_peak = 0
            for x in list_x:
                for y in list_y:
                    x_b = x * math.cos(-angle) - y * math.sin(-angle)
                    if x_b < 1:
                        continue
                    y_b = x * math.sin(-angle) + y * math.cos(-angle)
                    for z_b in list_z:
                        res = air.getNd(x_b, y_b,z_b, t)
                        if res >= 1:
                            #print(t, x, y, res)
                            x2j = lg + x/100*0.0009
                            y2w = lt + y/100*0.0008
                            val_data.append([int(t), x2j, y2w, round(res,2)])
                            if round(res,2)>the_peak:
                                the_peak = round(res,2)
                            else:
                                the_peak = the_peak
            the_peak_json = {'the_peak':the_peak,'time':t}
            the_peak_array.append(the_peak_json)
    # Ground-level release: same scan but only at z = 0.
    else:
        the_peak = ''
        for t in list_t_0:
            if the_peak == 0:
                the_peak_json = {'the_peak': the_peak, 'time': t}
                the_peak_array.append(the_peak_json)
                continue
            if u>=3:
                list_x = list(range(0, int(u * t/2),(int(u * t/2/400) if int(u * t/2/400)>0 else 1)))+list(range(int(u * t/2), int(u * t*3/2), int(u * t*3/200) if int(u * t*3/200)>0 else 1))
                list_y = list_x
            else:
                list_x = list(range(0, int(u * t/2), int(u * t/2/200) if int(u * t/2/200)>0 else 1))+list(range(int(u * t/2), int(u * t*3/2), int(u * t*3/200) if int(u * t*3/200)>0 else 1))
                list_y = list_x
            #list_y = list_x
            the_peak = 0
            for x in list_x:
                for y in list_y:
                    x_b = x * math.cos(-angle) - y * math.sin(-angle)
                    if x_b < 1:
                        continue
                    y_b = x * math.sin(-angle) + y * math.cos(-angle)
                    res = air.getNd(x_b, y_b,0, t)
                    if res >= 1:
                        x2j = lg + x / 100 * 0.0009
                        y2w = lt + y / 100 * 0.0008
                        val_data.append([int(t), x2j, y2w, round(res, 2)])
                        if round(res,2)>the_peak:
                            the_peak = round(res,2)
                        else:
                            the_peak = the_peak
            the_peak_json = {'the_peak':the_peak,'time':t}
            the_peak_array.append(the_peak_json)
        the_peak = ''
        for t in list_t_1:
            if the_peak == 0:
                the_peak_json = {'the_peak': the_peak, 'time': t}
                the_peak_array.append(the_peak_json)
                continue
            list_x = list(range(0, int((u*t+u*tl/2)/3),int((u*t+u*tl/2)/3/100) if int((u*t+u*tl/2)/3/100)>0 else 1))+list(range(int((u*t+u*tl/2)/3), int(u*t+u*tl/2),int((u*t+u*tl/2)/100) if int((u*t+u*tl/2)/100) >0 else 1))
            #list_x = range(int(u*tl/2), int(u*t+u*tl/2), int((u*t+u*tl/2)/200))
            #list_y = range(0, int(u * t + u * tl / 2), int((u * t + u * tl/2) / 100))
            list_y = list_x
            the_peak = 0
            for x in list_x:
                for y in list_y:
                    x_b = x * math.cos(-angle) - y * math.sin(-angle)
                    if x_b < 1:
                        continue
                    y_b = x * math.sin(-angle) + y * math.cos(-angle)
                    res = air.getNd(x_b, y_b,0, t)
                    if res >= 1:
                        #print(x_b, y_b, t, res)
                        # print(t, x, y, res)
                        x2j = lg + x / 100 * 0.0009
                        y2w = lt + y / 100 * 0.0008
                        val_data.append([int(t), x2j, y2w, round(res, 2)])
                        if round(res,2)>the_peak:
                            the_peak = round(res,2)
                        else:
                            the_peak = the_peak
            the_peak_json = {'the_peak':the_peak,'time':t}
            the_peak_array.append(the_peak_json)
    #print (the_peak_array)
    # Package plume centres, grid samples and per-step peaks, then upload
    # them as a multipart POST to the receiver service.
    all_data = {}
    all_data['center'] = center
    all_data['data'] = val_data
    all_data['the_peak'] = the_peak_array
    data = {'id':identity}
    files = {'file':json.dumps(all_data)}
    #url = 'http://172.18.21.16:8890/1/AtmosphericEvent/receiveJson'
    url = 'http://172.18.63.22:8888/1/AtmosphericEvent/receiveJson'
    #url = 'http://172.18.22.75:8891/test/AtmosphericEvent/receiveJson'
    response = requests.post(url, data=data, files=files)
    print("文件已发送")
    print(response.text)
    # with open("all_data.json", 'w', encoding='utf-8') as json_file:
    #     json.dump(all_data, json_file, ensure_ascii=False)
if __name__ == '__main__':
    # Q = 24600000
    # u = 1.9
    # tl = 600
    # angle = 90
    # lg = 106.86
    # lt = 27.131
    # identity = 92
    # angle = math.radians(angle)
    # continuousModelBackground(Q,u,tl,angle,identity,lg,lt)
    # Sample payload kept above for manually exercising the background job.
    # 0.0.0.0 listens on every network interface.
    app.run(host='0.0.0.0',port = 8080)
| [
"[email protected]"
] | |
ed520aeb8ea0f5db10a956ac573a6e155f2ceac4 | 14487e74a1cb67ca75e17271cc57df607a86ecae | /ingenius/urls.py | 54cbf1dbe299995d38b68abe311ede4782f2088c | [] | no_license | Naveenaidu/TEDxPESITBSC-Registration_System | e14f5f9e8f8a18e90c95c4b6b8da1876ead08e47 | 69631730c58bac2351978e7ef59e5a37bfaff696 | refs/heads/master | 2020-04-03T20:01:41.466969 | 2018-04-06T18:26:59 | 2018-04-06T18:26:59 | 155,544,689 | 0 | 0 | null | 2018-10-31T11:15:41 | 2018-10-31T11:15:40 | null | UTF-8 | Python | false | false | 1,347 | py | """ingenius URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
# Admin site plus the reg app's views; the trailing '/?' makes the final
# slash optional.  NOTE(review): string view references such as
# 'reg.views.home' are Django <= 1.9 syntax.
urlpatterns = [
    url(r'^admin/', include(admin.site.urls)),
    url(r'^$','reg.views.home',name='home'),
    url(r'^login/?$','reg.views.login_user',name='login'),
    url(r'^logout/?$','reg.views.logout_user',name='logout'),
    url(r'^register/?$','reg.views.register',name='register'),
    url(r'^breakfast/?$','reg.views.breakfast',name='breakfast'),
    url(r'^lunch/?$','reg.views.lunch',name='lunch'),
    url(r'^dinner/?$','reg.views.dinner',name='dinner'),
    url(r'^check_in/?$','reg.views.check_in',name='check_in'),
    url(r'^stats/?$','reg.views.stats',name='stats'),
    url(r'^dashboard/?$','reg.views.dashboard',name='dashboard'),
]
| [
"[email protected]"
] | |
96a5bb4ad4380aa447a2da36fa9b1bf172bcfe6d | 2d575cd9beed6840de0ca787134354bb2511e44f | /evaluation/word_analogy.py | 4922f90149011a3e74d8cac37b49e58ff513e778 | [] | no_license | truythu169/snml-skip-gram | b949fdb35c6ed89364a8253025f41c7df85f4e7e | 02ff190a256724cbd5b656d7ea1cb235abb08863 | refs/heads/master | 2023-04-16T13:55:17.409637 | 2020-06-18T09:52:44 | 2020-06-18T09:52:44 | 210,846,786 | 1 | 0 | null | 2023-03-24T22:09:21 | 2019-09-25T13:03:20 | Python | UTF-8 | Python | false | false | 5,072 | py | from utils.embedding import Embedding
import numpy as np
from sklearn.metrics.pairwise import cosine_similarity
from statistics import mean
class WordAnalogy:
    """Google word-analogy benchmark (a:b :: c:?) for word embeddings."""
    def __init__(self, filename='datasets/word_analogy/google_analogy.txt'):
        """Parse the analogy file into question triples, answers and categories."""
        with open(filename, "r") as f:
            L = f.read().splitlines()
        # Simple 4 word analogy questions with categories
        questions = []
        answers = []
        category = []
        cat = None
        for l in L:
            l = l.lower()
            # Lines of the form ": gram1-adjective-to-adverb" open a category.
            if l.startswith(":"):
                cat = l.split()[1]
            else:
                words = l.split()
                questions.append(words[0:3])
                answers.append(words[3])
                category.append(cat)
        # Categories named "gram..." are syntactic, everything else semantic.
        syntactic = set([c for c in set(category) if c.startswith("gram")])
        category_high_level = []
        for cat in category:
            category_high_level.append("syntactic" if cat in syntactic else "semantic")
        self.X = np.array(questions)
        self.y = np.array(answers)
        self.category = np.array(category)
        self.category_high_level = np.array(category_high_level)
        self.top_words = []
    def set_top_words(self, filename):
        """Load the word set (one per line) used when restrict_top_words is on."""
        with open(filename, "r", encoding='utf-8') as f:
            words = f.read().splitlines()
        self.top_words = set(words)
    def get_data_by_category(self, cat, high_level_category=False):
        """Return (questions, answers) for one category or high-level group."""
        if high_level_category:
            data_indexes = np.where(self.category_high_level == cat)[0]
        else:
            data_indexes = np.where(self.category == cat)[0]
        return self.X[data_indexes], self.y[data_indexes]
    def evaluate(self, embedding, high_level_category=False, restrict_top_words=False):
        """Score `embedding` on the analogy task.

        Each answer is predicted as vec(b) - vec(a) + vec(c) and matched to
        the nearest vocabulary vector by cosine similarity.  Questions with
        any out-of-vocabulary word are skipped.  Returns a dict of
        per-category accuracies plus a question-weighted 'all' entry.
        """
        # Categories list
        if high_level_category:
            cat_list = set(self.category_high_level)
        else:
            cat_list = set(self.category)
        # Devide data into categories
        X = {}
        labels = {}
        skip_lines = 0
        for cat in cat_list:
            X_cat, y_cat = self.get_data_by_category(cat, high_level_category)
            skipped_labels = []
            skipped_X = []
            # convert all words to int and skip words not exist in vocab
            for i in range(len(X_cat)):
                x = X_cat[i]
                y = y_cat[i]
                if embedding.in_vocabs(x) and embedding.in_vocab(y):
                    skipped_X.append(embedding.indexes(x))
                    skipped_labels.append(embedding.index(y))
                else:
                    skip_lines += 1
            X[cat] = skipped_X
            labels[cat] = skipped_labels
        # print('Skipped {} lines.'.format(skip_lines))
        # Predict answer vector
        predictions = {}
        for cat in cat_list:
            X_cat, y_cat = X[cat], labels[cat]
            pred_vectors = []
            if len(X_cat) == 0:
                continue
            for x in X_cat:
                x = embedding.vectors(x)
                # Analogy arithmetic: b - a + c.
                pred_vector = x[1] - x[0] + x[2]
                pred_vectors.append(pred_vector)
            # Get cosine similarity of predicted answer to all words in vocab
            pred_vectors = np.array(pred_vectors)
            distance_matrix = cosine_similarity(pred_vectors, embedding.e)
            # Remove words that were originally in the query
            for i in range(len(X_cat)):
                distance_matrix[i][X_cat[i]] = 0
            # Get nearest word
            result = []
            for i in range(len(X_cat)):
                most_similar = distance_matrix[i].argsort()[::-1]
                for j in range(len(most_similar)):
                    pred = most_similar[j]
                    # With the restriction on, walk down the ranking until a
                    # top-words candidate appears; otherwise take the top hit.
                    if restrict_top_words:
                        if embedding.word(pred) in self.top_words:
                            break
                    else:
                        break
                result.append(1) if pred == y_cat[i] else result.append(0)
            # accuracy
            acc = mean(result)
            # result
            # print("Category: %-30s, accuracy: %f (all: %d)" % (cat, acc, len(X_cat)))
            predictions[cat] = acc
        # overall
        total_count = 0
        acc = 0
        for cat in cat_list:
            cat_count = len(X[cat])
            if cat_count == 0:
                continue
            acc += cat_count * predictions.get(cat)
            total_count += cat_count
        if total_count == 0:
            predictions['all'] = 0
        else:
            predictions['all'] = acc / total_count
        # print("All Category accuracy: %f" % (acc / total_count))
        return predictions
if __name__ == "__main__":
    # Smoke run: evaluate one saved skip-gram embedding on the benchmark
    # without restricting predictions to the top-words vocabulary.
    word_analogy = WordAnalogy()
    word_analogy.set_top_words('../../data/text8_ng4/top_30000_words.txt')
    file_name = '../../output/skip_gram/text8_ng4/test/50dim/step-0/embedding.txt'
    embedding = Embedding.from_file(file_name)
    result = word_analogy.evaluate(embedding, high_level_category=False, restrict_top_words=False)
| [
"[email protected]"
] | |
aa746aaa44faf8a380306ce14dd173cda14750ab | e4002b074bc95ecbd9b0fab187f74fcc5672b3e0 | /hatemap/settings.py | b97eb25712931d26c700bfe4621ceed6d3b2c9c7 | [] | no_license | ackaraosman/hatemap | 4e22996116f97c3764a6366ac97b2b423780c600 | dfd4d7803664c928e5d831c910ac564d20a6dc4f | refs/heads/master | 2021-05-04T10:41:44.010473 | 2015-06-07T23:30:39 | 2015-06-07T23:30:39 | 54,372,895 | 0 | 0 | null | 2020-10-13T08:54:10 | 2016-03-21T08:45:08 | Python | UTF-8 | Python | false | false | 2,099 | py | """
Django settings for hatemap project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control - rotate it and
# load it from the environment before any deployment.
SECRET_KEY = 'r^ddot&qgz(cryjd)j8hnqr5uqsm2w-b3a7s+h-ib(9)-7hl7('
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.gis',
    'twitter',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'hatemap.urls'
WSGI_APPLICATION = 'hatemap.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
# PostGIS backend (GeoDjango); no password set, so local trust auth is assumed.
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'hatemap',
        'USER': 'postgres',
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
a2795103476b0cc82d3855d67eddad1199119d5d | 22295cda10cf11472fee987093e0b245f6f96ef3 | /common/2016astr/figure_movement_vs_sound_modulation.py | 3d8696a961cb42e4f03be4d3cf8699449ce1acb2 | [] | no_license | sjara/jaratest | aecb9e3bcc1ff91db35e7cd551c0f4f3da0b690a | 09bf2c76bd5bf45191a2c37c14171ae1e8902c4b | refs/heads/master | 2023-08-11T09:55:17.684814 | 2023-08-03T22:03:31 | 2023-08-03T22:03:31 | 63,100,718 | 2 | 5 | null | 2023-04-11T18:14:08 | 2016-07-11T20:43:04 | Python | UTF-8 | Python | false | false | 176 | py | '''
Script to make a scatter plot of movement modulation index vs modulation index of sound response by choice, for psychometric curve mice and switching mice separately.
'''
| [
"[email protected]"
] | |
d93ed6d435424c5b9304063f5b394607269e8293 | 31de2bb140c9e4c5cf4ec505a2524afaa211da4c | /74_is_strong_number.py | e7a6f2f118b1bd68db169d8063af992a9d999952 | [] | no_license | Vidhi23Chauhan/akademize_grofers_python | d785a6dbd8007cb383cb9ce622f0c69976a07e14 | ca9f653e19ac64be49e7730ab16c1fd404f235e6 | refs/heads/main | 2023-04-05T07:19:07.328267 | 2021-04-14T10:31:09 | 2021-04-14T10:31:09 | 351,513,920 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 772 | py | def get_factorial(n):
fact = 1
for i in range(n, 0, -1):
fact = fact * i
return fact
def is_strong(n):
    """Return True when n equals the sum of the factorials of its digits.

    Example: 145 is strong because 1! + 4! + 5! == 145.
    """
    remaining = n
    digit_factorial_total = 0
    while remaining > 0:
        remaining, digit = divmod(remaining, 10)
        digit_factorial_total += get_factorial(digit)
    return n == digit_factorial_total
def main():
    """Print the strong-number verdict for a few sample values."""
    print("Is Strong number:")
    for sample in (1, 2, 145, 146, 370):
        print(f"{sample} -> {is_strong(sample)}")
main()
"""
Is Strong number:
1 -> True
2 -> True
145 -> True
146 -> False
370 -> False
"""
"""
Strong number is a special number whose sum of factorial of digits
is equal to the original number.
For example: 145 is strong number. Since, 1! + 4! + 5! = 145
""" | [
"[email protected]"
] | |
a5235c799186a4e9446f729d5748ae459dd5f73e | 4870960bc25aa9264d3ead399f1662bda3880e19 | /Create_video.py | cdf7329a51f8592ae582ad5bbc39b6293f031836 | [] | no_license | Megapixel-code/Video-maker-with-Reddit | 5fff90a2241298044c8c567dcc39fc4e60218285 | 0f69670fce22e0de652448ee59236dfad29aee7b | refs/heads/main | 2023-03-21T02:47:58.804567 | 2021-03-06T09:44:39 | 2021-03-06T09:44:39 | 344,571,437 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,952 | py | import glob
import os
import praw
import requests
import shutil
import json
import moviepy.editor as mp
import moviepy.video as mpv
import moviepy.video.fx.all as vfx
from gtts import gTTS
from PIL import Image, ImageDraw, ImageFont
from unidecode import unidecode
from os.path import isfile, join
def delete_all_folder():
    """Delete every regular file inside the reddit/ working directory.

    Uses explicit paths instead of os.chdir(): the previous chdir-based
    version mutated the process-wide working directory and, if unlink
    raised midway, never chdir'd back.  Subdirectories are now skipped
    instead of making os.unlink raise.
    """
    for path in glob.glob(os.path.join('reddit', '*')):
        if os.path.isfile(path):
            os.unlink(path)
def deemojify(input_str):
    """Transliterate input_str to plain ASCII and collapse runs of whitespace.

    ASCII characters pass through unchanged; anything else is replaced by
    its unidecode transliteration (dropped when unidecode has none).
    """
    pieces = []
    for ch in input_str:
        try:
            ch.encode('ascii')
        except UnicodeEncodeError:
            ascii_form = unidecode(str(ch))
            if ascii_form != '':
                pieces.append(ascii_form)
        else:
            pieces.append(ch)
    return " ".join(''.join(pieces).split())
def get_images():
    """Download this week's top /r/mildlyinteresting .jpg posts into reddit/.

    Reads Reddit API credentials from credentials.json; saves each image
    as <n>.jpg and its deemojified title as <n>.txt, where n is the
    1-based rank among the fetched submissions.
    """
    directory = 'reddit'
    # https://www.reddit.com/r/mildlyinteresting/top/?t=week
    with open('credentials.json') as c:
        params = json.load(c)
    reddit = praw.Reddit(
        client_id=params['client_id'],
        client_secret=params['api_key'],
        password=params['password'],
        user_agent='<reddit_top> accessAPI:v0.0.1 (by/u/Megapixel_YTB)',
        username=params['username']
    )
    subreddit = reddit.subreddit('mildlyinteresting')
    name = 0
    for submitions in subreddit.top("week", limit=50):
        name += 1
        url = submitions.url
        file_name = str(name)
        # Only direct .jpg links are downloaded; galleries/videos are skipped
        # (their rank number is still consumed).
        if url.endswith('.jpg'):
            file_name += '.jpg'
            found = True
        else:
            found = False
        if found:
            r = requests.get(url)
            with open(file_name, 'wb') as f:
                f.write(r.content)
            shutil.move(file_name, directory)
            caption = submitions.title
            title = str(name)
            title += '.txt'
            with open(title, 'wt') as c:
                c.write(deemojify(caption))
                c.close()
            shutil.move(title, directory)
def resize(im, fill_color=(0, 0, 0, 0)):
    """Letterbox the image file `im` to 1920x1080 and draw its caption on top.

    The caption is read from the sibling .txt file (same basename), drawn
    centred inside a black bar across the top, and the result overwrites
    `im` in place.  NOTE(review): ImageDraw.textsize was removed in
    Pillow 10 - newer Pillow would need textbbox/textlength here.
    """
    img = Image.open(im)
    x, y = img.size
    # Pad the width so the canvas has a 16:9 aspect before the final resize.
    sizex = int(y / 1080 * 1920)
    sizey = y
    new_im = Image.new('RGB', (sizex, sizey), fill_color)
    new_im.paste(img, (int((sizex - x) / 2), int((sizey - y) / 2)))
    new_im = new_im.resize((1920, 1080), Image.LANCZOS)
    f = open(im[:-4] + '.txt', 'r')
    content = f.read()
    draw = ImageDraw.Draw(new_im)
    # Black caption bar across the top 25 pixels.
    draw.rectangle(((0, 0), (1920, 25)), fill=(0, 0, 0))
    font = ImageFont.truetype('arialbd.ttf', size=18)
    txt_size = draw.textsize(content, font=font)[0]
    draw.text((int((1920 - txt_size) / 2), 0), content, fill=(255, 255, 255), font=font)
    f.close()
    os.remove(im)
    new_im.save(im)
def create_tts():
    """Generate an MP3 narration for every caption .txt file in reddit/."""
    caption_files = [f for f in os.listdir('reddit/') if isfile(join('reddit/', f)) and f.endswith('.txt')]
    for file_name in caption_files:
        with open('reddit/' + file_name, 'r') as handle:
            caption = handle.read()
        speech = gTTS(text=caption, lang='en', slow=False)
        speech.save('reddit/' + file_name[:-4] + '.mp3')
def finish_video():
    """Assemble intro + narrated slideshow + outro into ma_video.mp4.

    Each reddit/<n>.mp3 narration is padded with 3 s of silence; each
    reddit/<n>.jpg is resized/captioned and shown for the matching audio
    clip's duration, composited over space.mpeg with black keyed out.
    NOTE(review): this pairs clips by os.listdir order of the .mp3 and
    .jpg sets - confirm they always line up - and os.remove raises if
    ma_video.mp4 does not already exist.
    """
    all_clips = []
    for file in [f for f in os.listdir('reddit/') if isfile(join('reddit/', f)) and f.endswith('.mp3')]:
        sound = mp.AudioFileClip('reddit/' + file)
        # Append 3 seconds of silence after each narration.
        sound = mp.concatenate_audioclips([sound, mp.AudioClip(lambda t: 0, duration=3)])
        all_clips.append(sound)
    all_video_clips = []
    x = 0
    for file in [f for f in os.listdir('reddit/') if isfile(join('reddit/', f)) and f.endswith('.jpg')]:
        resize('reddit/' + file)
        vid = mp.ImageClip('reddit/' + file, duration=all_clips[x].duration)
        all_video_clips.append(vid)
        x += 1
    sound = mp.concatenate_audioclips(all_clips)
    video = mp.concatenate_videoclips(all_video_clips)
    video.audio = sound
    video.fps = 60
    background = mp.VideoFileClip('space.mpeg')
    # Key out pure black so the space background shows through.
    masked_clip = mpv.fx.all.mask_color(video, color=[0, 0, 0], thr=0, s=0)
    midle_video = mp.CompositeVideoClip([background, masked_clip]).set_duration(video.duration)
    intro = mp.VideoFileClip('Intro.mpeg')
    outro = mp.VideoFileClip('Outro.mpeg')
    final_video = mp.concatenate_videoclips([intro, midle_video, outro])
    os.remove('ma_video.mp4')
    final_video.write_videofile('ma_video.mp4')
def create():
    """Run the whole pipeline: wipe reddit/, fetch images, build TTS, render."""
    banner = '==============================================================================================='
    print()
    delete_all_folder()
    for label, step in (('Importing the images .....', get_images),
                        ('creating tts .............', create_tts)):
        print(label, end='')
        step()
        print(' done !')
    print('Making the video .........')
    print(banner)
    finish_video()
    print(banner)
| [
"[email protected]"
] | |
7f23a7ced3ebf5f2daafcf14a80021056b5c0d75 | 477fa324ca50a5ea7e3094bc1e42d02fa25c19d4 | /train_diff_layer/train_1layer.py | bed8bf45e17c66e597129bc2897a3b0f61f061a8 | [] | no_license | coldwindx/HetGraphAE | cfcf21f26a8904268082c113224f5218586818bb | f0e0853da9a07bea0f4eb70a41534716d26d4d06 | refs/heads/master | 2023-08-23T16:10:23.354443 | 2021-10-13T02:26:51 | 2021-10-13T02:26:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,140 | py | import torch
import torch.optim as optim
import torch.nn.functional as F
import numpy as np
import random
import scipy.sparse as sp
from construct_sub_graph import sub_graph
from model.encoder import EncoderAtten1Layer
from model.decoder_feature import DNN as DecoderFeature
print('开始训练')
# Load the process-execution and file-access adjacency matrices (CSR for
# fast row slicing) plus the shared node feature matrix.
execAdj = sp.load_npz('trainData/execAdj.npz')
execAdj = execAdj.tocsr()
fileAdj = sp.load_npz('trainData/fileAdj.npz')
fileAdj = fileAdj.tocsr()
feature = np.load('trainData/execFeature.npy')
exec_graph = sub_graph(execAdj, feature)
file_graph = sub_graph(fileAdj, feature)
# Heterogeneous graph = union of both edge sets.
het_adj = execAdj + fileAdj
het_graph = sub_graph(het_adj, feature)
feature_dim = feature.shape[1]
node_num = feature.shape[0]
# file_nodes: indices of nodes that have at least one file edge.
file_sum = fileAdj.sum(axis=1)
file_nodes = []
for i in range(len(file_sum)):
    if file_sum[i][0] != 0:
        file_nodes.append(i)
class Train:
    """Trainer for the heterogeneous graph auto-encoder.

    Pairs the attention encoder with the feature decoder.  The adjacency
    reconstruction loss updates only the encoder; the feature
    reconstruction loss then updates the decoder (the encoder gradients
    from that backward pass are zeroed out before the decoder step).
    Depends on the module-level globals exec_graph, file_graph, het_graph,
    feature_dim, node_num and file_nodes.
    """

    def __init__(self, gcn_h1_dim, gcn_h2_dim, gcn_h3_dim, learn_rate1=0.001, learn_rate2=0.001, weight_decay=0.001):
        """Build encoder/decoder, both losses, and one Adam optimizer each."""
        self.encoder = EncoderAtten1Layer(feature_dim, gcn_h1_dim, gcn_h2_dim, gcn_h3_dim)
        self.decoder = DecoderFeature(gcn_h3_dim, gcn_h2_dim, gcn_h1_dim, feature_dim)
        self.loss_fn_feature = torch.nn.MSELoss(reduction='mean')
        # reduction='none': the adjacency loss is masked and averaged manually.
        self.loss_fn_adj = torch.nn.MSELoss(reduction='none')
        if torch.cuda.is_available():
            device = torch.device('cuda:0')
            self.decoder = self.decoder.to(device)
            self.encoder = self.encoder.to(device)
        self.optimizer_encoder = optim.Adam(
            [{'params': self.encoder.parameters(), 'lr': learn_rate1}],
            weight_decay=weight_decay)
        self.optimizer_decoder = optim.Adam(
            [{'params': self.decoder.parameters(), 'lr': learn_rate2}],
            weight_decay=weight_decay)

    def get_embedding(self, node):
        """Encode the node-id list `node` from its exec- and file-subgraphs."""
        exec_adj, exec_feature, _, _, _, exec_mask = exec_graph.construct(node, 1)
        file_adj, file_feature, _, _, _, file_mask = file_graph.construct(node, 1)
        if torch.cuda.is_available():
            exec_adj = exec_adj.cuda()
            exec_feature = exec_feature.cuda()
            file_adj = file_adj.cuda()
            file_feature = file_feature.cuda()
        z, _, _, _ = self.encoder(exec_feature, exec_adj, file_feature, file_adj, exec_mask, file_mask)
        return z

    def batch_loss_adj(self, feature, raw_adj, re_adj):
        """Masked MSE between reconstructed and raw adjacency.

        The loss is evaluated only where the raw adjacency has an edge or
        the input-feature cosine similarity is <= 0.8, so highly similar
        but unlinked node pairs are not penalised.
        """
        feature_norm = F.normalize(feature, p=2, dim=1)
        feature_sim = feature_norm @ feature_norm.t()
        if torch.cuda.is_available():
            sim_mask = torch.where(feature_sim > 0.8, torch.tensor([0]).cuda(), torch.tensor([1]).cuda())
        else:
            sim_mask = torch.where(feature_sim > 0.8, torch.tensor([0]), torch.tensor([1]))
        sim_mask = sim_mask.float()
        sim_mask += raw_adj
        if torch.cuda.is_available():
            sim_mask = torch.where(sim_mask > 0, torch.tensor([1]).cuda(), torch.tensor([0]).cuda())
        else:
            sim_mask = torch.where(sim_mask > 0, torch.tensor([1]), torch.tensor([0]))
        adj_loss = self.loss_fn_adj(re_adj * sim_mask, raw_adj).sum() / sim_mask.sum()
        return adj_loss

    def batch_loss(self, ids, a=0.05, b=0.05):
        """Return (feature_loss, adj_loss) for a batch of node ids.

        NOTE(review): parameters a and b are accepted but unused; kept for
        interface compatibility with existing callers.
        """
        ids = list(set(ids))
        ids.sort()
        _, het_feature, _, raw_het_adj, nodes, _ = het_graph.construct(ids, 1)
        if torch.cuda.is_available():
            het_feature = het_feature.cuda()
            raw_het_adj = raw_het_adj.cuda()
        z = self.get_embedding(nodes)
        re_feature = self.decoder(z)
        z = F.normalize(z, p=2, dim=1)
        re_adj = z @ z.t()
        # Cosine similarity in [-1, 1] rescaled to [0, 1] to match adjacency.
        re_het_adj = (re_adj + 1) / 2
        feature_loss = self.loss_fn_feature(re_feature, het_feature)
        adj_loss = self.batch_loss_adj(het_feature, raw_het_adj, re_het_adj)
        return feature_loss, adj_loss

    def train(self, batch_size=100, t=1000):
        """Train for t epochs and return (decoder, encoder).

        Batches interleave one file-node batch with up to six plain-node
        batches so file-connected nodes are oversampled.  A checkpoint is
        written every 100 batches; on interrupt/OOM/runtime failure the
        current models are saved before returning.
        """
        node_list = list(range(node_num))
        random.shuffle(node_list)
        random.shuffle(file_nodes)
        start = 0
        file_start = 0
        data_set = []
        while start < (node_num - batch_size):
            if file_start > (len(file_nodes) - batch_size):
                data_set.append(file_nodes[file_start:])
                file_start = 0
            else:
                data_set.append(file_nodes[file_start:file_start + batch_size])
                file_start += batch_size
            for _ in range(6):
                if start >= (node_num - batch_size):
                    break
                data_set.append(node_list[start: start + batch_size])
                start += batch_size
        if start < node_num:
            data_set.append(node_list[start:])
        try:
            count = 0
            for times in range(t):
                self.encoder.train()
                self.decoder.train()
                for i in range(len(data_set)):
                    count += 1
                    print("epoch:%s, batch:%s" % (times, i))
                    loss_fea, loss_adj = self.batch_loss(data_set[i])
                    # Adjacency loss trains the encoder only.
                    self.optimizer_encoder.zero_grad()
                    loss_adj.backward(retain_graph=True)
                    self.optimizer_encoder.step()
                    # Feature loss trains the decoder; the encoder gradients
                    # it produces are discarded via zero_grad.
                    self.optimizer_decoder.zero_grad()
                    self.optimizer_encoder.zero_grad()
                    loss_fea.backward()
                    self.optimizer_decoder.step()
                    if count == 100:
                        torch.save(self.decoder, 'save_model/decoder' + str(times))
                        torch.save(self.encoder, 'save_model/encoder' + str(times))
                        count = 0
        # BUG FIX: the original `except KeyboardInterrupt or MemoryError or
        # RuntimeError:` evaluated the `or` first and therefore only caught
        # KeyboardInterrupt; a tuple catches all three as intended.
        except (KeyboardInterrupt, MemoryError, RuntimeError):
            torch.save(self.decoder, 'save_model/decoder')
            torch.save(self.encoder, 'save_model/encoder')
        return self.decoder, self.encoder
# Fix every RNG so training runs are reproducible.
SEED = 5000
random.seed(SEED)
torch.manual_seed(SEED)
torch.cuda.manual_seed(SEED)
np.random.seed(SEED)
# Hidden sizes 100/90/80, lr 1e-3 for both modules, no weight decay.
train_ = Train(100, 90, 80, 0.001, 0.001, 0.000)
decoder, encoder = train_.train(batch_size=8, t=10)
torch.save(decoder, 'save_model/decoder')
torch.save(encoder, 'save_model/encoder')
| [
"[email protected]"
] | |
9fccada480118eb7f3986aa4174be26962a8b93a | 71d757af0de13c7f4a0fa39578c3abea3451372b | /learning/backup_gagd.py | bd15cb8ec9b5986b6f791dbfb99a3abbfeb18f86 | [] | no_license | bh0085/compbio | 039421d04317ae878f222d6448144be88aa95f69 | 95bc24ea34346ff4b9a120e317d08518277f268c | refs/heads/master | 2021-03-12T19:57:22.421467 | 2013-02-05T16:45:37 | 2013-02-05T16:45:37 | 947,034 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,841 | py | from pyfann import libfann as fann
class MyFANN():
    """Thin experiment wrapper that builds and trains a 3-layer FANN net.

    NOTE(review): scratch/backup code.  `shape` is undefined here unless a
    star-import (e.g. numpy) supplies it, `idxs` is unused, and everything
    after the bare `raise Exception()` below is dead code.  Python 2
    syntax (print statement).
    """
    def __init__(self,xdat,ydat,idxs):
        # xdat/ydat: training inputs and targets; their row counts must match.
        if shape(xdat)[0] != shape(ydat)[0]:
            raise Exception('dimension mismatch b/w x, y')
        nt = len(xdat)
        ny = shape(ydat)[1]
        nx = shape(xdat)[1]
        # Network hyper-parameters, hard-coded for this experiment.
        num_input = nx;
        num_output = ny;
        num_layers = 3;
        num_neurons_hidden = 3;
        desired_error = 0.2;
        max_epochs =2000;
        epochs_between_reports = 1000;
        net = fann.neural_net()
        net.create_standard_array([num_layers, num_input, num_neurons_hidden, num_output]);
        net.set_activation_function_hidden( fann.SIGMOID_SYMMETRIC);
        net.set_activation_function_output( fann.SIGMOID_SYMMETRIC);
        t = fann.training_data()
        t.set_train_data(xdat,ydat)
        nt = net.train_on_data(t,max_epochs,epochs_between_reports,desired_error)
        out = net.save( "xor_float.net");
        print net.get_training_algorithm()
        # Deliberate early abort while debugging; the lines below never run.
        raise Exception()
        fann.train_on_file( "xor.data", max_epochs, epochs_between_reports, desired_error);
        out = net.save( "xor_float.net");
        net.destroy();
from pyevolve import G1DList, GSimpleGA, Selectors, Scaling, DBAdapters
#from random import seed, randint, random
def eval_polynomial(x, *coefficients):
    """Evaluate sum(c_i * x**i) where coefficients are given lowest order first."""
    return sum(coeff * x ** power for power, coeff in enumerate(coefficients))
def generate_fitness_function(sample_points):
    """Build a GA fitness function for polynomial fitting.

    The returned function scores a chromosome (a coefficient list) as the
    negated total absolute error over (x, y) sample_points, so a perfect
    fit scores 0 and worse fits score more negatively.
    """
    def fitness_function(chromosome):
        total_error = 0
        for point in sample_points:
            total_error += abs(eval_polynomial(point[0], *chromosome) - point[1])
        return -total_error
    return fitness_function
def run_pfit():
# Generate a random polynomial, and generate sample points from it
seed()
source_polynomial = []
for i in xrange(randint(1, 5)):
source_polynomial.append(randint(-20,20))
sample_points = []
for i in xrange(20):
n = randint(-100, 100)
sample_points.append((n, eval_polynomial(n, *source_polynomial)))
# Create the population
genome = G1DList.G1DList(5)
genome.evaluator.set(generate_fitness_function(sample_points))
genome.setParams(rangemin=-50, rangemax=50)
# Set up the engine
ga = GSimpleGA.GSimpleGA(genome)
ga.setPopulationSize(1000)
ga.selector.set(Selectors.GRouletteWheel)
# Change the scaling method
pop = ga.getPopulation()
pop.scaleMethod.set(Scaling.SigmaTruncScaling)
# Start the algorithm, and print the results.
ga.evolve(freq_stats=10)
print(ga.bestIndividual())
print("Source polynomial: " + repr(source_polynomial))
print("Sample points: " + repr(sample_points))
| [
"[email protected]"
] | |
934da6a4eebd005c15e84c6a79e859acb2d7892c | 47ed25486436dbce4b9bac62dba1f83bad884b27 | /models/feed.py | d6c9fff250f844a68542944238c7416fe544bd26 | [] | no_license | hackerrithm/Quasar | 376f5e79bbc6eb9e0d4e2996f5b4656daff775be | d843ed8085af3ce91a81de5bfa2642275fc1811e | refs/heads/master | 2021-01-24T11:23:39.875005 | 2016-10-07T16:48:31 | 2016-10-07T16:48:31 | 70,228,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 53 | py | class feed(object):
"""description of class"""
| [
"[email protected]"
] | |
6c67dfbe348126447354bd125a22c8c109b0ab15 | a6bd7d3c2dfd6f22b22b7390a2230651e1f3febd | /1.py | 412b8d7720f095722caac5fb02499d4d2a29fbb3 | [] | no_license | NicolasQueiroga/Resolucao_PF_DesSoft--2020.1 | 5c9e8b8a19045763c5af1e32426fa4e2c1891096 | fcafa170b0cec6dcaa658c3c72746d51ed8acc88 | refs/heads/master | 2022-11-06T14:15:31.544152 | 2020-06-22T21:02:12 | 2020-06-22T21:02:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 648 | py | with open('criptografado.txt', 'r') as arquivo:
conteudo = arquivo.readlines()
for i in conteudo:
i = i.strip()
letra = [char for char in i]
for e in range(len(letra)):
if letra[e] == 's':
letra[e] = 'z'
elif letra[e] == 'a':
letra[e] = 'e'
elif letra[e] == 'r':
letra[e] = 'b'
elif letra[e] == 'b':
letra[e] = 'r'
elif letra[e] == 'e':
letra[e] = 'a'
elif letra[e] == 'z':
letra[e] = 's'
new = ''.join(letra)
print(new)
| [
"[email protected]"
] | |
f54d7f9ceffb65a98b2f18defa7ce00de08c4646 | 67e0bd533c87144d0b3dccc37172a482bc853805 | /usermgmt/migrations/0001_initial.py | 68a227aae35ae167f9bc7308c2371681df0a357d | [] | no_license | jndinh/assignment2_backend | 2d0fbea3d02d4414c0dd481fb02e9a3a29c10f24 | 694bbe8c7ac7c1c22aaafa5a1a7dcb80d14a7bd5 | refs/heads/master | 2022-12-11T18:37:15.611109 | 2018-05-03T00:22:39 | 2018-05-03T00:22:39 | 129,988,708 | 0 | 0 | null | 2022-12-08T00:45:00 | 2018-04-18T01:58:16 | Python | UTF-8 | Python | false | false | 830 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-04-18 01:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=20, unique=True)),
('password', models.CharField(max_length=20)),
('latitude', models.FloatField(blank=True, null=True)),
('longitude', models.FloatField(blank=True, null=True)),
('timestamp', models.BigIntegerField()),
],
),
]
| [
"[email protected]"
] | |
cf4869a008091dac50e4e6d07bded0da84f85bb3 | 2bcf18252fa9144ece3e824834ac0e117ad0bdf3 | /zpt/trunk/site-packages/zpt/_pytz/zoneinfo/Asia/Ulan_Bator.py | 23ee14fe6b126706fac6097086cd541788e4110c | [
"MIT",
"ZPL-2.1"
] | permissive | chadwhitacre/public | 32f65ba8e35d38c69ed4d0edd333283a239c5e1d | 0c67fd7ec8bce1d8c56c7ff3506f31a99362b502 | refs/heads/master | 2021-05-10T14:32:03.016683 | 2010-05-13T18:24:20 | 2010-05-13T18:24:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,011 | py | '''tzinfo timezone information for Asia/Ulan_Bator.'''
from zpt._pytz.tzinfo import DstTzInfo
from zpt._pytz.tzinfo import memorized_datetime as d
from zpt._pytz.tzinfo import memorized_ttinfo as i
class Ulan_Bator(DstTzInfo):
'''Asia/Ulan_Bator timezone definition. See datetime.tzinfo for details'''
zone = 'Asia/Ulan_Bator'
_utc_transition_times = [
d(1,1,1,0,0,0),
d(1905,7,31,16,52,28),
d(1977,12,31,17,0,0),
d(1983,3,31,16,0,0),
d(1983,9,30,15,0,0),
d(1984,3,31,16,0,0),
d(1984,9,29,18,0,0),
d(1985,3,30,18,0,0),
d(1985,9,28,18,0,0),
d(1986,3,29,18,0,0),
d(1986,9,27,18,0,0),
d(1987,3,28,18,0,0),
d(1987,9,26,18,0,0),
d(1988,3,26,18,0,0),
d(1988,9,24,18,0,0),
d(1989,3,25,18,0,0),
d(1989,9,23,18,0,0),
d(1990,3,24,18,0,0),
d(1990,9,29,18,0,0),
d(1991,3,30,18,0,0),
d(1991,9,28,18,0,0),
d(1992,3,28,18,0,0),
d(1992,9,26,18,0,0),
d(1993,3,27,18,0,0),
d(1993,9,25,18,0,0),
d(1994,3,26,18,0,0),
d(1994,9,24,18,0,0),
d(1995,3,25,18,0,0),
d(1995,9,23,18,0,0),
d(1996,3,30,18,0,0),
d(1996,9,28,18,0,0),
d(1997,3,29,18,0,0),
d(1997,9,27,18,0,0),
d(1998,3,28,18,0,0),
d(1998,9,26,18,0,0),
d(2001,4,27,18,0,0),
d(2001,9,28,17,0,0),
d(2002,3,29,18,0,0),
d(2002,9,27,17,0,0),
d(2003,3,28,18,0,0),
d(2003,9,26,17,0,0),
d(2004,3,26,18,0,0),
d(2004,9,24,17,0,0),
d(2005,3,25,18,0,0),
d(2005,9,23,17,0,0),
d(2006,3,24,18,0,0),
d(2006,9,29,17,0,0),
d(2007,3,30,18,0,0),
d(2007,9,28,17,0,0),
d(2008,3,28,18,0,0),
d(2008,9,26,17,0,0),
d(2009,3,27,18,0,0),
d(2009,9,25,17,0,0),
d(2010,3,26,18,0,0),
d(2010,9,24,17,0,0),
d(2011,3,25,18,0,0),
d(2011,9,23,17,0,0),
d(2012,3,30,18,0,0),
d(2012,9,28,17,0,0),
d(2013,3,29,18,0,0),
d(2013,9,27,17,0,0),
d(2014,3,28,18,0,0),
d(2014,9,26,17,0,0),
d(2015,3,27,18,0,0),
d(2015,9,25,17,0,0),
d(2016,3,25,18,0,0),
d(2016,9,23,17,0,0),
d(2017,3,24,18,0,0),
d(2017,9,29,17,0,0),
d(2018,3,30,18,0,0),
d(2018,9,28,17,0,0),
d(2019,3,29,18,0,0),
d(2019,9,27,17,0,0),
d(2020,3,27,18,0,0),
d(2020,9,25,17,0,0),
d(2021,3,26,18,0,0),
d(2021,9,24,17,0,0),
d(2022,3,25,18,0,0),
d(2022,9,23,17,0,0),
d(2023,3,24,18,0,0),
d(2023,9,29,17,0,0),
d(2024,3,29,18,0,0),
d(2024,9,27,17,0,0),
d(2025,3,28,18,0,0),
d(2025,9,26,17,0,0),
d(2026,3,27,18,0,0),
d(2026,9,25,17,0,0),
d(2027,3,26,18,0,0),
d(2027,9,24,17,0,0),
d(2028,3,24,18,0,0),
d(2028,9,29,17,0,0),
d(2029,3,30,18,0,0),
d(2029,9,28,17,0,0),
d(2030,3,29,18,0,0),
d(2030,9,27,17,0,0),
d(2031,3,28,18,0,0),
d(2031,9,26,17,0,0),
d(2032,3,26,18,0,0),
d(2032,9,24,17,0,0),
d(2033,3,25,18,0,0),
d(2033,9,23,17,0,0),
d(2034,3,24,18,0,0),
d(2034,9,29,17,0,0),
d(2035,3,30,18,0,0),
d(2035,9,28,17,0,0),
d(2036,3,28,18,0,0),
d(2036,9,26,17,0,0),
d(2037,3,27,18,0,0),
d(2037,9,25,17,0,0),
]
_transition_info = [
i(25680,0,'LMT'),
i(25200,0,'ULAT'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
]
Ulan_Bator = Ulan_Bator()
| [
"[email protected]"
] | |
0d4c7f4d593f7dd6202682d0b22e198ebfd53434 | 80ae5df6106e16f22c423ab4319170d103951599 | /pyKairosDB/tests/test_graphite_style_read.py | 2b132206e3903b5bf99bb5ab6ed38b3ae804e773 | [
"Apache-2.0"
] | permissive | justmedude/pyKairosDB | d096dd074a770f7dd5b247de115d50e5250d7eaa | 4c17f551a3e8daf0fa407c8362f6da0f7343784f | refs/heads/master | 2021-01-18T07:30:03.642998 | 2020-06-07T17:23:20 | 2020-06-07T17:23:20 | 33,954,605 | 0 | 0 | NOASSERTION | 2020-06-07T17:23:22 | 2015-04-14T20:14:21 | Python | UTF-8 | Python | false | false | 802 | py | #!/usr/bin/env python
import time
import pyKairosDB
from pyKairosDB import util as util
from pyKairosDB import graphite
# use this after the following read test has settled:
# ipython pyKairosDB/tests/test_graphite_write.py test.bar.baz
# ipython pyKairosDB/tests/test_graphite_write.py test.bar.bar
# ipython pyKairosDB/tests/test_graphite_write.py test.bar.foo
# ipython pyKairosDB/tests/test_graphite_write.py test.bar.bat
#
c = pyKairosDB.connect() # use localhost:8080, the default, no ssl
start_time = time.time() - 3600
end_time = time.time()
metrics_list = graphite.expand_graphite_wildcard_metric_name(c, "test.*.*.*")
(timeinfo, datapoints) = graphite.read_absolute(c, metrics_list[0], start_time, end_time)
print "Datapoints are:"
print datapoints
print "Timeinfo is:"
print timeinfo | [
"[email protected]"
] | |
1505fa6f68042b2c947fc0d69cb9a44f0e2ec8bf | b4b2cb10ecfbe0f62615e750aad140b2e57c727d | /rc_board/recipes/migrations/0001_initial.py | efb029ecaec4bf0ba6fb42e15896c688f4969998 | [] | no_license | apustovitin/rb | 8694a7d5167a1c9180c175d3442222c1bda7691e | 2150ab15e03f63e321f5acd3f98e3605e8e554bf | refs/heads/main | 2023-08-28T20:03:13.603134 | 2021-11-01T00:23:32 | 2021-11-01T00:23:32 | 417,274,548 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,258 | py | # Generated by Django 3.2.8 on 2021-10-14 19:03
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('category', models.CharField(choices=[('FML', 'Первые блюда'), ('SCS', 'Вторые блюда'), ('SLD', 'Салаты'), ('SNC', 'Закуски'), ('DSR', 'Десерты'), ('BVR', 'Напитки')], max_length=3, unique=True)),
],
),
migrations.CreateModel(
name='Recipe',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('creation_datetime', models.DateTimeField(auto_now_add=True)),
('title', models.CharField(max_length=255)),
('content', models.TextField()),
('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='recipes.category')),
],
),
]
| [
"[email protected]"
] | |
5bc3f6bce2c26a92d231ccc40f81cff77b91934c | dfdb41c035447ac085f6b650e2179dc46ea45a5c | /aiohttp_socks/core_socks/_proxy_sync.py | 4f4e1765eb868b71e3a102675abd45518a14e59c | [
"Apache-2.0"
] | permissive | maemo-leste-extras/aiohttp-socks | d7391acc2df9654531f9f2510f1ffc1793312ad9 | 4771b8bc73bd85f41a158600171b931de8e0c414 | refs/heads/master | 2023-03-05T20:30:15.237710 | 2020-09-24T06:44:00 | 2020-09-24T06:44:00 | 335,752,625 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,379 | py | import socket
from ._types import ProxyType
from ._errors import ProxyConnectionError, ProxyTimeoutError
from ._helpers import parse_proxy_url
from ._stream_sync import SyncSocketStream
from ._proto_socks5_sync import Socks5Proto
from ._proto_http_sync import HttpProto
from ._proto_socks4_sync import Socks4Proto
DEFAULT_TIMEOUT = 60
class SyncProxy:
def connect(self, dest_host, dest_port, timeout=None, _socket=None):
raise NotImplementedError() # pragma: no cover
@property
def proxy_host(self):
raise NotImplementedError() # pragma: no cover
@property
def proxy_port(self):
raise NotImplementedError() # pragma: no cover
class Proxy:
@classmethod
def create(cls, proxy_type: ProxyType, host: str, port: int,
username: str = None, password: str = None,
rdns: bool = None) -> SyncProxy:
if proxy_type == ProxyType.SOCKS4:
return Socks4Proxy(
proxy_host=host,
proxy_port=port,
user_id=username,
rdns=rdns
)
if proxy_type == ProxyType.SOCKS5:
return Socks5Proxy(
proxy_host=host,
proxy_port=port,
username=username,
password=password,
rdns=rdns
)
if proxy_type == ProxyType.HTTP:
return HttpProxy(
proxy_host=host,
proxy_port=port,
username=username,
password=password
)
raise ValueError('Invalid proxy type: ' # pragma: no cover
'{}'.format(proxy_type))
@classmethod
def from_url(cls, url: str, **kwargs) -> SyncProxy:
proxy_type, host, port, username, password = parse_proxy_url(url)
return cls.create(
proxy_type=proxy_type,
host=host,
port=port,
username=username,
password=password,
**kwargs
)
class BaseProxy(SyncProxy):
def __init__(self, proxy_host, proxy_port):
self._proxy_host = proxy_host
self._proxy_port = proxy_port
self._dest_host = None
self._dest_port = None
self._timeout = None
self._stream = SyncSocketStream()
def connect(self, dest_host, dest_port, timeout=None, _socket=None):
if timeout is None:
timeout = DEFAULT_TIMEOUT
self._dest_host = dest_host
self._dest_port = dest_port
self._timeout = timeout
try:
self._stream.open_connection(
host=self._proxy_host,
port=self._proxy_port,
timeout=timeout,
_socket=_socket
)
self._negotiate()
except socket.timeout as e:
self._stream.close()
raise ProxyTimeoutError('Proxy connection timed out: %s'
% self._timeout) from e
except OSError as e:
self._stream.close()
msg = ('Can not connect to proxy %s:%s [%s]' %
(self._proxy_host, self._proxy_port, e.strerror))
raise ProxyConnectionError(e.errno, msg) from e
except Exception:
self._stream.close()
raise
return self._stream.socket
def _negotiate(self):
proto = self._create_proto()
proto.negotiate()
def _create_proto(self):
raise NotImplementedError() # pragma: no cover
@property
def proxy_host(self):
return self._proxy_host
@property
def proxy_port(self):
return self._proxy_port
class Socks5Proxy(BaseProxy):
def __init__(self, proxy_host, proxy_port,
username=None, password=None, rdns=None):
super().__init__(
proxy_host=proxy_host,
proxy_port=proxy_port
)
self._username = username
self._password = password
self._rdns = rdns
def _create_proto(self):
return Socks5Proto(
stream=self._stream,
dest_host=self._dest_host,
dest_port=self._dest_port,
username=self._username,
password=self._password,
rdns=self._rdns
)
class Socks4Proxy(BaseProxy):
def __init__(self, proxy_host, proxy_port,
user_id=None, rdns=None):
super().__init__(
proxy_host=proxy_host,
proxy_port=proxy_port
)
self._user_id = user_id
self._rdns = rdns
def _create_proto(self):
return Socks4Proto(
stream=self._stream,
dest_host=self._dest_host,
dest_port=self._dest_port,
user_id=self._user_id,
rdns=self._rdns
)
class HttpProxy(BaseProxy):
def __init__(self, proxy_host, proxy_port, username=None, password=None):
super().__init__(
proxy_host=proxy_host,
proxy_port=proxy_port
)
self._username = username
self._password = password
def _create_proto(self):
return HttpProto(
stream=self._stream,
dest_host=self._dest_host,
dest_port=self._dest_port,
username=self._username,
password=self._password
)
| [
"[email protected]"
] | |
1b406b2dc38004db14248af19fb7f7be9b8e7f6c | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/BuildLinks1.10/test_input/CJ_16_1/16_1_1_FreeTShirt_a.py | 0207b362ff64f55d6e7a49f758c368374d2c5dc1 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 404 | py | def argmax(s):
z = max(s)
return [(idx, c) for idx, c in enumerate(s) if c == z]
def last(s):
if len(s) <= 1:
return s
return max([s[idx]+last(s[:idx])+s[idx+1:] for idx, c in argmax(s)])
fw = open('a-o', 'w')
for idx, line in enumerate(open('A-small-i')):
if idx == 0:
continue
s = line.strip()
print(s)
fw.write('Case #{0}: {1}\n'.format(idx,last(s)))
| [
"[[email protected]]"
] | |
ef5a7badcd93c97e6745d7e0e52875269113aa69 | dd73504f6cde1581db1c6526dfc8ae55fb9a5658 | /wageshare_summary_cleaned_market.py | 27d233cceeebdc3df3e81b489f2e729ea354cecc | [] | no_license | jabramsky/jabramsky.github.io | efafa07d2918240b527a28ac0cb96eac66536e20 | e771b4df00722e47118e73e2cf61b6333fa00646 | refs/heads/master | 2021-01-22T02:53:32.324009 | 2017-05-02T04:05:15 | 2017-05-02T04:05:15 | 81,079,781 | 0 | 0 | null | 2017-05-02T04:05:16 | 2017-02-06T11:20:59 | HTML | UTF-8 | Python | false | false | 1,324 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Apr 19 15:11:55 2017
@author: User
"""
import pandas as pd
import matplotlib.pyplot as plt
import statsmodels.formula.api as smf
data = pd.read_csv('timeseries_nototals.csv')
data.head()
exc_inds = (94, 95, 96)
for i in exc_inds:
exc_data = data[(data['NUMBER'] != i)]
cleaned_data = exc_data[(exc_data['GVA'] > exc_data['COE'])]
needed_data = cleaned_data[['DATE', 'WAGESHARE', 'SERVICE']]
nonservice_data = needed_data[(needed_data['SERVICE'] == 0)]
service_data = needed_data[(needed_data['SERVICE'] == 1)]
print('Wage shares in non-service industries, whole timeseries:')
print(nonservice_data.describe())
print('Wage shares in service industries, whole timeseries:')
print(service_data.describe())
yearlist = []
for y in range(1997, 2015):
yeardata = needed_data[(needed_data['DATE'] == y)]
nonservice_data = yeardata[(yeardata['SERVICE'] == 0)]
n_yearsharedata = nonservice_data[['WAGESHARE']]
service_data = yeardata[(yeardata['SERVICE'] == 1)]
yearsharedata = service_data[['WAGESHARE']]
yearlist.append((y, n_yearsharedata.describe(), yearsharedata.describe()))
for y in yearlist:
print(y[0], ":")
print('Non-service industries:')
print(y[1])
print('Service industries:')
print(y[2]) | [
"[email protected]"
] | |
8732c9af3fea83ea57fa51e58d56b098749760f6 | 6561baa7ca68875e62fbf2d20c7887e4aadebe9f | /tests/cds_test_20_sf_ukmo.py | efa292077e335becd6970c33d7b3c44900ea5f35 | [
"Apache-2.0"
] | permissive | EXWEXs/cfgrib | 9057c9e5abbc38a32f113f832f1506988839ee82 | 8a1727af2c3bbcf2e17f250dfafcb4cc4e959354 | refs/heads/master | 2020-04-01T15:44:45.140700 | 2018-10-14T14:39:13 | 2018-10-14T14:39:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,089 | py |
import pytest
import cfgrib
import cdscommon
TEST_FILES = {
'seasonal-original-single-levels-ukmo': [
'seasonal-original-single-levels',
{
'originating_centre': 'ukmo',
'variable': 'maximum_2m_temperature_in_the_last_24_hours',
'year': '2018',
'month': ['04', '05'],
'day': [
'01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12',
'13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24',
'25', '26', '27', '28', '29', '30', '31'
],
'leadtime_hour': ['24', '48'],
'grid': ['3', '3'],
'format': 'grib',
},
192,
],
'seasonal-original-pressure-levels-ukmo': [
'seasonal-original-pressure-levels',
{
'originating_centre': 'ukmo',
'variable': 'temperature',
'pressure_level': ['500', '850'],
'year': '2018',
'month': ['04', '05'],
'day': [
'01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12',
'13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24',
'25', '26', '27', '28', '29', '30', '31'
],
'leadtime_hour': ['24', '48'],
'grid': ['3', '3'],
'format': 'grib',
},
192,
],
'seasonal-postprocessed-single-levels-ukmo': [
'seasonal-postprocessed-single-levels',
{
'originating_centre': 'ukmo',
'variable': 'maximum_2m_temperature_in_the_last_24_hours_anomaly',
'product_type': 'monthly_mean',
'year': '2018',
'month': ['04', '05'],
'leadtime_month': ['1', '2'],
'grid': ['3', '3'],
'format': 'grib',
},
210,
],
'seasonal-monthly-single-levels-monthly_mean-ukmo': [
'seasonal-monthly-single-levels',
{
'originating_centre': 'ukmo',
'variable': 'maximum_2m_temperature_in_the_last_24_hours',
'product_type': 'monthly_mean',
'year': '2018',
'month': ['04', '05'],
'leadtime_month': ['1', '2'],
'grid': ['3', '3'],
'format': 'grib',
},
210,
],
'seasonal-monthly-single-levels-ensemble_mean-ukmo': [
'seasonal-monthly-single-levels',
{
'originating_centre': 'ukmo',
'variable': 'maximum_2m_temperature_in_the_last_24_hours',
'product_type': 'ensemble_mean',
'year': '2018',
'month': ['04', '05'],
'leadtime_month': ['1', '2'],
'grid': ['3', '3'],
'format': 'grib',
},
210,
],
'seasonal-monthly-single-levels-hindcast_climate_mean-ukmo': [
'seasonal-monthly-single-levels',
{
'originating_centre': 'ukmo',
'variable': 'maximum_2m_temperature_in_the_last_24_hours',
'product_type': 'hindcast_climate_mean',
'year': '2018',
'month': ['04', '05'],
'leadtime_month': ['1', '2'],
'grid': ['3', '3'],
'format': 'grib',
},
210,
],
}
@pytest.mark.parametrize('test_file', TEST_FILES.keys())
def test_reanalysis_Stream(test_file):
dataset, request, key_count = TEST_FILES[test_file]
path = cdscommon.ensure_data(dataset, request, name='cds-' + test_file + '-{uuid}.grib')
stream = cfgrib.FileStream(path)
leader = stream.first()
assert len(leader) == key_count
assert sum(1 for _ in stream) == leader['count']
@pytest.mark.parametrize('test_file', TEST_FILES.keys())
def test_reanalysis_Dataset(test_file):
dataset, request, key_count = TEST_FILES[test_file]
path = cdscommon.ensure_data(dataset, request, name='cds-' + test_file + '-{uuid}.grib')
res = cfgrib.xarray_store.open_dataset(path, flavour_name='cds')
res.to_netcdf(path[:-5] + '.nc')
| [
"[email protected]"
] | |
85ef73de5c1fceffd5aff452e2b9902d1718602f | 5ca6730fa1178582d5f5875155f340ec0f406294 | /practice_problem-16.py | 44785ae4df282d5b7cc6f83173866d825eb41375 | [] | no_license | MahadiRahman262523/Python_Code_Part-1 | 9740d5ead27209d69af4497eea410f2faef50ff3 | e2f08e3d0564a003400743ae6050fd687c280639 | refs/heads/main | 2023-07-25T09:10:53.649082 | 2021-09-05T19:39:14 | 2021-09-05T19:39:14 | 403,396,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | # Write a program to count the number of zeros in the following tuple:
# a = (7,0,8,0,0,9)
a = (7,0,8,0,0,9)
print(a.count(0)) | [
"[email protected]"
] | |
8689b6823245a63ca4d654d51b9574331fa99dd9 | 7dcc175cae0cb34d8281dad566a2be4b9253224e | /Assignment2/Logistic Regression/logistic_regression.py | d3919204db3edda3e086488ffa9474886bd99c6a | [] | no_license | pratyushbanerjee/ml-assignment | 48e07b1765e822a4ecd71238584eb4b2acf83f0c | aa198cd92604b70b662bf7a8ba050d142027611b | refs/heads/master | 2022-11-26T09:20:18.118273 | 2020-08-05T21:25:40 | 2020-08-05T21:25:40 | 285,405,592 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,590 | py | #L Regression
#importing libraries
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import random
import statistics
#importing the dataset
image_detection=pd.read_csv('data_banknote_authentication_excel.csv')
#splitting the data into testing and training data
test=image_detection.sample(274)
train = image_detection[~image_detection.isin(test)]
train.dropna(inplace = True)
#defining sigmoid function,the loss function
def sigmoid(x):
return 1/(1+np.exp(-x))
def square_loss(x,y):
z=y-x
return np.mean(pow(z,2))
def scale(x):
mean=statistics.mean(x)
variance=statistics.variance(x)
for i in range(0,len(x)):
x[i]=(x[i]-mean)/(variance)
return x
scaled=scale(image_detection.entropy)
#creating testing and training variables, as well as the dependant(class) and the independant variables(entropy)
x_tr,y_tr=train.entropy,train['class']
x_te,y_te=test.entropy,test['class']
#Implementing Gradient Descent algorithm
lr = 0.01 #learning late
const=np.random.uniform(0,1)
W =const+np.random.uniform(0,1) # colom 1
b = 0.1
for i in range(10000):
z = np.dot(x_tr, W) + b
y_pred = sigmoid(z)
l = square_loss(y_pred, y_tr)
gradient_W = np.dot((y_pred-y_tr).T, x_tr)/x_tr.shape[0]
gradient_b = np.mean(y_pred-y_tr)
W = W-lr * gradient_W
b = b-lr* gradient_b
#implementing the sigmoid function
for i in range(len(x_te)):
r = sigmoid(np.dot(x_te, W)+b)
#filling up the model results in the class_1 list
class_1=[]
for i in range(0,len(r)):
if r[i]<0.5:
class_1.append(0)
else:
class_1.append(1)
#number of zeroes and ones according to our model
nummodel_1=0
nummodel_2=0
for i in range(0,len(class_1)):
if class_1[i]==0:
nummodel_1=nummodel_1+1
else:
nummodel_2=nummodel_2+1
#number of atual zeroes and ones in the dataset
a=test['class']
numoriginal_1=0
numoriginal_2=0
for i in a:
if i==0:
numoriginal_1=numoriginal_1+1
else:
numoriginal_2=numoriginal_2+1
#Finding out their F Score and Accuracy
TP=0
TN=0
FP=0
FN=0
if (nummodel_2>numoriginal_2):
FP=nummodel_2-numoriginal_2
FN=0
TP=numoriginal_2
TN=nummodel_1
else:
FN=nummodel_1-numoriginal_1
FP=0
TN=numoriginal_1
TP=nummodel_2
accuracy= (TP+TN)/(TN+TP+FP+FN)
precision=TP/(TP+FP)
recall=TP/(TP+FN)
F_score=2*precision*recall/(precision+recall)
#L_1 Regression
#importing libraries
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import random
import statistics
#importing the dataset
image_detection=pd.read_csv('C:/Users/hp/Desktop/data_banknote_authentication_excel.csv')
#splitting the data into testing and training data
test=image_detection.sample(274)
train = image_detection[~image_detection.isin(test)]
train.dropna(inplace = True)
#defining sigmoid function,the loss function
def sigmoid(x):
return 1/(1+np.exp(-x))
def square_loss(x,y):
z=y-x
return np.mean(pow(z,2))
def scale(x):
mean=statistics.mean(x)
variance=statistics.variance(x)
for i in range(0,len(x)):
x[i]=(x[i]-mean)/(variance)
return x
scaled=scale(image_detection.entropy)
#creating testing and training variables, as well as the dependant(class) and the independant variables(entropy)
x_tr,y_tr=train.entropy,train['class']
x_te,y_te=test.entropy,test['class']
#Implementing Gradient Descent algorithm
lr = 0.01 #learning late
const=np.random.uniform(0,1)
W =const+np.random.uniform(0,1) # colom 1
L1_coeff=5
b = 0.1
for i in range(10000):
z = np.dot(x_tr, W) + b
y_pred = sigmoid(z)
l = square_loss(y_pred, y_tr)
gradient_W = np.dot((y_pred-y_tr).T, x_tr)/x_tr.shape[0]+L1_coeff*np.sign(W)
gradient_b = np.mean(y_pred-y_tr)
W = W-lr * gradient_W
b = b-lr* gradient_b
#implementing the sigmoid function
for i in range(len(x_te)):
r = sigmoid(np.dot(x_te, W)+b)
#filling up the model results in the class_1 list
class_1=[]
for i in range(0,len(r)):
if r[i]<0.5:
class_1.append(0)
else:
class_1.append(1)
#number of zeroes and ones according to our model
nummodel_1=0
nummodel_2=0
for i in range(0,len(class_1)):
if class_1[i]==0:
nummodel_1=nummodel_1+1
else:
nummodel_2=nummodel_2+1
#number of atual zeroes and ones in the dataset
a=test['class']
numoriginal_1=0
numoriginal_2=0
for i in a:
if i==0:
numoriginal_1=numoriginal_1+1
else:
numoriginal_2=numoriginal_2+1
#Finding out their F Score and Accuracy
TP=0
TN=0
FP=0
FN=0
if (nummodel_2>numoriginal_2):
FP=nummodel_2-numoriginal_2
FN=0
TP=numoriginal_2
TN=nummodel_1
else:
FN=nummodel_1-numoriginal_1
FP=0
TN=numoriginal_1
TP=nummodel_2
accuracy= (TP+TN)/(TN+TP+FP+FN)
precision=TP/(TP+FP)
recall=TP/(TP+FN)
F_score=2*precision*recall/(precision+recall)
#L_2 Regression
#importing libraries
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import random
import statistics
#importing the dataset
image_detection=pd.read_csv('C:/Users/hp/Desktop/data_banknote_authentication_excel.csv')
#splitting the data into testing and training data
test=image_detection.sample(274)
train = image_detection[~image_detection.isin(test)]
train.dropna(inplace = True)
#defining sigmoid function,the loss function
def sigmoid(x):
return 1/(1+np.exp(-x))
def square_loss(x,y):
z=y-x
return np.mean(pow(z,2))
def scale(x):
mean=statistics.mean(x)
variance=statistics.variance(x)
for i in range(0,len(x)):
x[i]=(x[i]-mean)/(variance)
return x
scaled=scale(image_detection.entropy)
#creating testing and training variables, as well as the dependant(class) and the independant variables(entropy)
x_tr,y_tr=train.entropy,train['class']
x_te,y_te=test.entropy,test['class']
#Implementing Gradient Descent algorithm
lr = 0.01 #learning late
const=np.random.uniform(0,1)
W =const+np.random.uniform(0,1) # colom 1
L2_coeff=5
b = 0.1
for i in range(10000):
z = np.dot(x_tr, W) + b
y_pred = sigmoid(z)
l = square_loss(y_pred, y_tr)
gradient_W = np.dot((y_pred-y_tr).T, x_tr)/x_tr.shape[0]+L2_coeff*2*W
gradient_b = np.mean(y_pred-y_tr)
W = W-lr * gradient_W
b = b-lr* gradient_b
#implementing the sigmoid function
for i in range(len(x_te)):
r = sigmoid(np.dot(x_te, W)+b)
#filling up the model results in the class_1 list
class_1=[]
for i in range(0,len(r)):
if r[i]<0.5:
class_1.append(0)
else:
class_1.append(1)
#number of zeroes and ones according to our model
nummodel_1=0
nummodel_2=0
for i in range(0,len(class_1)):
if class_1[i]==0:
nummodel_1=nummodel_1+1
else:
nummodel_2=nummodel_2+1
#number of atual zeroes and ones in the dataset
a=test['class']
numoriginal_1=0
numoriginal_2=0
for i in a:
if i==0:
numoriginal_1=numoriginal_1+1
else:
numoriginal_2=numoriginal_2+1
#Finding out their F Score and Accuracy
TP=0
TN=0
FP=0
FN=0
if (nummodel_2>numoriginal_2):
FP=nummodel_2-numoriginal_2
FN=0
TP=numoriginal_2
TN=nummodel_1
else:
FN=nummodel_1-numoriginal_1
FP=0
TN=numoriginal_1
TP=nummodel_2
accuracy= (TP+TN)/(TN+TP+FP+FN)
precision=TP/(TP+FP)
recall=TP/(TP+FN)
F_score=2*precision*recall/(precision+recall)
| [
"[email protected]"
] | |
5e0bde2a16193651c22bf50efd429a326bf6f474 | 6b564e24a99b2d2c6a384d8674974f10ef9461d5 | /iptv_proxy/providers/crystalclear/data_model.py | 53c6ad0d72865ecf54ed3413a6d9df1d667e4c12 | [
"MIT"
] | permissive | Onemars/IPTVProxy | 1c1421c6962c1f7cf4cef90d8a2c98e98f5ded25 | 06d5472f49ecaa7eafb90832a1c9ac85a09cd268 | refs/heads/master | 2020-05-24T14:34:48.486177 | 2019-05-17T14:17:21 | 2019-05-17T14:17:21 | 187,311,948 | 1 | 0 | null | 2019-05-18T03:58:48 | 2019-05-18T03:58:47 | null | UTF-8 | Python | false | false | 6,858 | py | import logging
from sqlalchemy import Column
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import LargeBinary
from sqlalchemy import String
from sqlalchemy.ext.hybrid import hybrid_property
from iptv_proxy.data_model import DateTimeUTC
from iptv_proxy.providers.crystalclear.constants import CrystalClearConstants
from iptv_proxy.providers.crystalclear.db import Base
logger = logging.getLogger(__name__)
class CrystalClearChannel(Base):
    """ORM model for one Crystal Clear channel.

    Persists the channel's M3U8 grouping, number and name, a pickled
    channel object, and two pre-rendered XMLTV fragments (complete and
    minimal) so guide output can be served without re-serializing.
    """
    # Provider name is folded into index names so several providers'
    # tables can share one schema without index-name collisions.
    _provider_name = CrystalClearConstants.PROVIDER_NAME.lower()
    __tablename__ = 'channel'
    # Primary key is supplied by the provider, not auto-generated.
    _id = Column('id', String, primary_key=True, autoincrement=False)
    _m3u8_group = Column('m3u8_group', String, nullable=False)
    _number = Column('number', Integer, nullable=False)
    _name = Column('name', String, nullable=False)
    # Pickled channel object; opaque to the database.
    _pickle = Column('pickle', LargeBinary, nullable=False)
    _complete_xmltv = Column('complete_xmltv', String, nullable=False)
    _minimal_xmltv = Column('minimal_xmltv', String, nullable=False)
    # Indexes cover the lookups used elsewhere: by id, by M3U8 group,
    # by (group, number), and by number alone.
    __table_args__ = (Index('{0}_channel_ix_id'.format(_provider_name), _id.asc()),
                      Index('{0}_channel_ix_m3u8_group'.format(_provider_name), _m3u8_group.asc()),
                      Index('{0}_channel_ix_m3u8_group_&_number'.format(_provider_name),
                            _m3u8_group.asc(),
                            _number.asc()),
                      Index('{0}_channel_ix_number'.format(_provider_name), _number.asc()))
    def __init__(self, id_, m3u8_group, number, name, pickle, complete_xmltv, minimal_xmltv):
        """Create a channel row; arguments map one-to-one onto the columns."""
        self._id = id_
        self._m3u8_group = m3u8_group
        self._number = number
        self._name = name
        self._pickle = pickle
        self._complete_xmltv = complete_xmltv
        self._minimal_xmltv = minimal_xmltv
    # The hybrid properties below are plain pass-through accessors for the
    # underscore-prefixed columns; hybrid_property keeps them usable in
    # both instance code and SQL expressions.
    @hybrid_property
    def complete_xmltv(self):
        return self._complete_xmltv
    @complete_xmltv.setter
    def complete_xmltv(self, complete_xmltv):
        self._complete_xmltv = complete_xmltv
    @hybrid_property
    def id(self):
        return self._id
    @id.setter
    def id(self, id_):
        self._id = id_
    @hybrid_property
    def m3u8_group(self):
        return self._m3u8_group
    @m3u8_group.setter
    def m3u8_group(self, m3u8_group):
        self._m3u8_group = m3u8_group
    @hybrid_property
    def minimal_xmltv(self):
        return self._minimal_xmltv
    @minimal_xmltv.setter
    def minimal_xmltv(self, minimal_xmltv):
        self._minimal_xmltv = minimal_xmltv
    @hybrid_property
    def name(self):
        return self._name
    @name.setter
    def name(self, name):
        self._name = name
    @hybrid_property
    def number(self):
        return self._number
    @number.setter
    def number(self, number):
        self._number = number
    @hybrid_property
    def pickle(self):
        return self._pickle
    @pickle.setter
    def pickle(self, pickle):
        self._pickle = pickle
class CrystalClearProgram(Base):
    """ORM model for one Crystal Clear EPG program entry.

    Each row ties a program (pickled object plus pre-rendered complete
    and minimal XMLTV fragments) to its channel via both the channel's
    XMLTV id and its number, bounded by start/stop timestamps.
    """
    # Provider name is folded into index names so several providers'
    # tables can share one schema without index-name collisions.
    _provider_name = CrystalClearConstants.PROVIDER_NAME.lower()
    __tablename__ = 'program'
    # Primary key is supplied by the provider, not auto-generated.
    _id = Column('id', String, primary_key=True, autoincrement=False)
    # Airing window; DateTimeUTC normalizes values to UTC storage.
    _start = Column('start', DateTimeUTC(timezone=True), nullable=False)
    _stop = Column('stop', DateTimeUTC(timezone=True), nullable=False)
    _channel_xmltv_id = Column('channel_xmltv_id', String, nullable=False)
    _channel_number = Column('channel_number', Integer, nullable=False)
    # Pickled program object; opaque to the database.
    _pickle = Column('pickle', LargeBinary, nullable=False)
    _complete_xmltv = Column('complete_xmltv', String, nullable=False)
    _minimal_xmltv = Column('minimal_xmltv', String, nullable=False)
    # Indexes cover guide queries: by id, by (channel, start) for both
    # channel keys, by (channel, window), and by start for purging.
    __table_args__ = (
        Index('{0}_program_ix_id'.format(_provider_name), _id.asc()),
        Index('{0}_program_ix_channel_number_&_start'.format(_provider_name), _channel_number.asc(), _start.asc()),
        Index('{0}_program_ix_channel_xmltv_id_&_start'.format(_provider_name), _channel_xmltv_id.asc(), _start.asc()),
        Index('{0}_program_ix_channel_xmltv_id_&_start_&_stop'.format(_provider_name),
              _channel_xmltv_id.asc(),
              _start.asc(),
              _stop.asc()),
        Index('{0}_program_ix_start'.format(_provider_name), _start.asc()))
    def __init__(self,
                 id_,
                 start,
                 stop,
                 channel_xmltv_id,
                 channel_number,
                 pickle,
                 complete_xmltv,
                 minimal_xmltv):
        """Create a program row; arguments map one-to-one onto the columns."""
        self._id = id_
        self._start = start
        self._stop = stop
        self._channel_xmltv_id = channel_xmltv_id
        self._channel_number = channel_number
        self._pickle = pickle
        self._complete_xmltv = complete_xmltv
        self._minimal_xmltv = minimal_xmltv
    # The hybrid properties below are plain pass-through accessors for the
    # underscore-prefixed columns; hybrid_property keeps them usable in
    # both instance code and SQL expressions.
    @hybrid_property
    def channel_number(self):
        return self._channel_number
    @channel_number.setter
    def channel_number(self, channel_number):
        self._channel_number = channel_number
    @hybrid_property
    def channel_xmltv_id(self):
        return self._channel_xmltv_id
    @channel_xmltv_id.setter
    def channel_xmltv_id(self, channel_xmltv_id):
        self._channel_xmltv_id = channel_xmltv_id
    @hybrid_property
    def complete_xmltv(self):
        return self._complete_xmltv
    @complete_xmltv.setter
    def complete_xmltv(self, complete_xmltv):
        self._complete_xmltv = complete_xmltv
    @hybrid_property
    def id(self):
        return self._id
    @id.setter
    def id(self, id_):
        self._id = id_
    @hybrid_property
    def minimal_xmltv(self):
        return self._minimal_xmltv
    @minimal_xmltv.setter
    def minimal_xmltv(self, minimal_xmltv):
        self._minimal_xmltv = minimal_xmltv
    @hybrid_property
    def pickle(self):
        return self._pickle
    @pickle.setter
    def pickle(self, pickle):
        self._pickle = pickle
    @hybrid_property
    def start(self):
        return self._start
    @start.setter
    def start(self, start):
        self._start = start
    @hybrid_property
    def stop(self):
        return self._stop
    @stop.setter
    def stop(self, stop):
        self._stop = stop
class CrystalClearSetting(Base):
    """ORM model for a provider-scoped key/value setting.

    Simple ``name`` -> ``value`` string pairs used to persist provider
    state between runs.
    """
    _provider_name = CrystalClearConstants.PROVIDER_NAME.lower()
    __tablename__ = 'setting'
    _name = Column('name', String, primary_key=True)
    _value = Column('value', String, nullable=False)
    # Prefix the index name with the provider name, matching the channel
    # and program tables above; the unprefixed 'setting_ix_name' used
    # previously would collide if several providers' models were ever
    # registered on one shared MetaData.
    __table_args__ = (Index('{0}_setting_ix_name'.format(_provider_name), _name.asc()),)
    def __init__(self, name, value):
        """Store one setting; *name* is the primary key."""
        self._name = name
        self._value = value
    @hybrid_property
    def name(self):
        return self._name
    @name.setter
    def name(self, name):
        self._name = name
    @hybrid_property
    def value(self):
        return self._value
    @value.setter
    def value(self, value):
        self._value = value
| [
"[email protected]"
] | |
c2c880cdc9c3dc7f4534b7d285264499ff277426 | b406b4cf62616d3c6c0001cd57f5d28c2e09224e | /electronics/migrations/0028_auto_20210615_1442.py | ff731a43d0439f459de28fcc039260635fa53371 | [] | no_license | thefhtamim/Study-Material | 6284c85c717de2068f6a6c70428c4e1414ab76f4 | 15d4ee0da02f52677ab95b708c0cd1607eb52936 | refs/heads/main | 2023-07-26T10:28:59.916360 | 2021-09-10T19:02:33 | 2021-09-10T19:02:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 738 | py | # Generated by Django 3.1 on 2021-06-15 09:12
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an ``uploaded_on`` timestamp to the three content models
    (class notes, laboratory videos, video lectures).

    NOTE(review): ``auto_now=True`` makes Django overwrite the field on
    every save, so despite the name it records last-modified time, not
    first-upload time -- confirm that is the intended semantics.
    """
    dependencies = [
        ('electronics', '0027_auto_20210615_0130'),
    ]
    operations = [
        migrations.AddField(
            model_name='classnotes',
            name='uploaded_on',
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AddField(
            model_name='laboratory_videos',
            name='uploaded_on',
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AddField(
            model_name='video_lectures',
            name='uploaded_on',
            field=models.DateTimeField(auto_now=True),
        ),
    ]
| [
"[email protected]"
] | |
58787a0990221af1de4438a921cad4f347ef9aae | 9b9367781065c789868f65e43342f5bc3ecf1c57 | /utils/solver.py | ddea201cafd17b4cac3738b92ac34c7480471c97 | [] | no_license | sbleblanc/pgm_project | 2e91983047a93add6bb17d1355277d6b35645543 | 272532a90f144fbd485d4608d07aeb5765665da2 | refs/heads/master | 2020-04-04T03:48:37.952140 | 2018-12-20T04:01:14 | 2018-12-20T04:01:14 | 155,726,879 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 446 | py | import numpy as np
@np.vectorize
def inverse_solver(f, y, a, b, tol=1e-5):
    """
    Return x such that f(x) == y, to within *tol* on the f-axis.

    Assumes f is monotonically increasing on [a, b] and that the
    solution lies inside that interval.  Repeatedly bisects [a, b],
    stopping as soon as |f(x) - y| <= tol.
    """
    mid = (a + b) / 2
    val = f(mid)
    # Bisect: the half containing the root is kept each iteration.
    while np.abs(val - y) > tol:
        if val < y:
            a = mid
        else:
            b = mid
        mid = (a + b) / 2
        val = f(mid)
    return mid
| [
"[email protected]"
] | |
9847fe6297be8aa31e49a6137243226ace453b50 | be55e8a4859a57e0758d88f1d05d69a08b8355ed | /sports_app/views.py | 83f734c4930f5a896e59c4537fb6742cc66d0ec6 | [] | no_license | akyker20/sports_app | 5acfa4762dd565a48f805f38e23d287a41506490 | cda41d463ad4e1e6232d4c69633b806c9af93146 | refs/heads/master | 2021-01-17T00:04:51.475090 | 2015-05-27T07:01:56 | 2015-05-27T07:01:56 | 35,401,754 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,109 | py | from django.shortcuts import render
from sports_app.forms import AuthenticateForm, AthleteCreationForm
from django.contrib.auth.decorators import login_required
from django.contrib.auth import login, authenticate, logout
from django.http import HttpResponse
from django.shortcuts import render, redirect
from datetime import date
from sports_app.decorators import group_required
from athletes.models import Team
def home(request):
    """Landing page: send authenticated athletes to their profile page,
    everyone else to the public home template."""
    # getattr with a None default avoids the RelatedObjectDoesNotExist
    # (an AttributeError subclass) that request.user.athleteprofile
    # raises for authenticated users without an athlete profile, e.g. a
    # coach account hitting this view.
    if request.user.is_authenticated() and getattr(request.user, 'athleteprofile', None):
        return redirect('athlete_profile')
    return render(request, 'home.html', {})
# Create your views here.
def login_user(request):
    """Authenticate the posted credentials and redirect by group.

    A valid POST logs the user in, then routes athletes to
    'athlete_profile' and coaches to 'coach_profile'.  Every other
    outcome (GET, invalid form, no matching group) redirects to 'home'.
    """
    form = AuthenticateForm()
    if request.method == 'POST':
        form = AuthenticateForm(data=request.POST)
        if form.is_valid():
            # Fetch the user once instead of calling get_user() per check.
            user = form.get_user()
            login(request, user)
            if user.groups.filter(name='athletes').exists():
                return redirect('athlete_profile')
            if user.groups.filter(name='coaches').exists():
                return redirect('coach_profile')
    return redirect('home')
@login_required
def logout_user(request):
    """Log the current user out and redirect to the home page."""
    logout(request)
    return redirect('home')
| [
"[email protected]"
] | |
87476fc48dcc81c8407d184dc2ba254400452b87 | c5389783a234bc755571f84e619ac296cff4aa4b | /views.py | f9c5a4ac7104989c4e658990236b9aeb89d4533d | [] | no_license | abhishekmajhi42/the_weather_app | 0f5381b2f832077334bb6597c2f55eca6c4b7709 | e52cf4a218c0464fbe542cf47a94b70aa103a796 | refs/heads/master | 2022-12-24T09:08:56.809596 | 2020-09-27T17:15:26 | 2020-09-27T17:15:26 | 299,080,529 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 862 | py | from django.shortcuts import render
import requests
# Create your views here.
from weatherapp.forms import CityForm
from weatherapp.models import City
def index(request):
    """Show current weather for every saved city and handle adding one.

    POST: validate and save the submitted city, then fall through to
    render the full list (same response shape as GET).
    """
    # NOTE(review): the OpenWeatherMap API key is hard-coded here;
    # consider moving it to settings/environment configuration.
    url='http://api.openweathermap.org/data/2.5/weather?q={}&units=imperial&appid=271d1234d3f497eed5b1d80a07b3fcd1'
    if request.method=="POST":
        form=CityForm(request.POST)
        # Bug fix: validate before saving.  Calling save() on an
        # unvalidated ModelForm raises ValueError for bad input, which
        # surfaced to users as a 500 error.
        if form.is_valid():
            form.save()
    # Fresh, unbound form for the template regardless of method.
    form = CityForm()
    cities=City.objects.all()
    weather_data=[]
    for city in cities:
        r=requests.get(url.format(city)).json()
        # assumes the API call succeeded; an unknown city returns a
        # payload without 'main'/'weather' and raises KeyError here --
        # TODO confirm desired handling for failed lookups
        city_weather={'city':city,'temperature':r['main']["temp"],'description':r["weather"][0]["description"],'icon':r["weather"][0]["icon"],}
        weather_data.append(city_weather)
    context={'weather_data':weather_data,'form':form}
    return render(request,'weather.html',context)
| [
"[email protected]"
] |