blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4c838e38957b8e229ba09084ca17679920d4a87a | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/XYPLEX-LAT1-MIB.py | 9115a68e1bcae47973be45f12f9172c3ebd1ff1c | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 19,721 | py | #
# PySNMP MIB module XYPLEX-LAT1-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/XYPLEX-LAT1-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:40:05 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueRangeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
MibIdentifier, IpAddress, Counter32, enterprises, NotificationType, Integer32, Bits, iso, Counter64, Gauge32, TimeTicks, Unsigned32, ModuleIdentity, ObjectIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "IpAddress", "Counter32", "enterprises", "NotificationType", "Integer32", "Bits", "iso", "Counter64", "Gauge32", "TimeTicks", "Unsigned32", "ModuleIdentity", "ObjectIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# --- OID registrations: Xyplex enterprise arc (1.3.6.1.4.1.33) and its LAT subtree (.3) ---
xyplex = MibIdentifier((1, 3, 6, 1, 4, 1, 33))
lat = MibIdentifier((1, 3, 6, 1, 4, 1, 33, 3))
# --- Read-write LAT protocol configuration scalars (lat.1 .. lat.11) ---
latAnnounceServices = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latAnnounceServices.setStatus('mandatory')
latCircuitTimer = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(30, 200))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latCircuitTimer.setStatus('mandatory')
latIdentificationLengthLimit = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latIdentificationLengthLimit.setStatus('mandatory')
latKeepaliveTimer = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 180))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latKeepaliveTimer.setStatus('mandatory')
latMulticastTimer = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 180))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latMulticastTimer.setStatus('mandatory')
latNodeLimit = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latNodeLimit.setStatus('mandatory')
latNumber = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 32767))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latNumber.setStatus('mandatory')
latRetransmitLimit = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(4, 120))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latRetransmitLimit.setStatus('mandatory')
# Group-code bitmask: fixed 32-octet (256-bit) string, one bit per LAT group.
latLocalServiceGroups = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(32, 32)).setFixedLength(32)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latLocalServiceGroups.setStatus('mandatory')
latGroupPurge = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latGroupPurge.setStatus('mandatory')
latNodePurge = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latNodePurge.setStatus('mandatory')
# --- Read-only protocol-wide counters (lat.12 .. lat.22) ---
latNodesRejected = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodesRejected.setStatus('mandatory')
latInMessages = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latInMessages.setStatus('mandatory')
latOutMessages = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latOutMessages.setStatus('mandatory')
latInSessionsAccepted = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latInSessionsAccepted.setStatus('mandatory')
latInSessionsRejected = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latInSessionsRejected.setStatus('mandatory')
latAddressChange = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latAddressChange.setStatus('mandatory')
latInDuplicates = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latInDuplicates.setStatus('mandatory')
latOutRetransmits = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latOutRetransmits.setStatus('mandatory')
latInBadMessages = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latInBadMessages.setStatus('mandatory')
latInBadSlots = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latInBadSlots.setStatus('mandatory')
latInBadMulticasts = MibScalar((1, 3, 6, 1, 4, 1, 33, 3, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latInBadMulticasts.setStatus('mandatory')
# --- latPortTable (lat.23): per-port LAT settings, indexed by latPortIndex ---
latPortTable = MibTable((1, 3, 6, 1, 4, 1, 33, 3, 23), )
if mibBuilder.loadTexts: latPortTable.setStatus('mandatory')
latPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 33, 3, 23, 1), ).setIndexNames((0, "XYPLEX-LAT1-MIB", "latPortIndex"))
if mibBuilder.loadTexts: latPortEntry.setStatus('mandatory')
latPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 23, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latPortIndex.setStatus('mandatory')
# 32-octet group bitmasks, same encoding as latLocalServiceGroups.
latPortAuthorizedGroups = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 23, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(32, 32)).setFixedLength(32)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latPortAuthorizedGroups.setStatus('mandatory')
latPortAutoPrompt = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 23, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latPortAutoPrompt.setStatus('mandatory')
latPortCurrentGroups = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 23, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(32, 32)).setFixedLength(32)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latPortCurrentGroups.setStatus('mandatory')
latPortRemoteModification = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 23, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latPortRemoteModification.setStatus('mandatory')
# --- latOfferedServiceTable (lat.24): services this node offers, indexed by service name ---
latOfferedServiceTable = MibTable((1, 3, 6, 1, 4, 1, 33, 3, 24), )
if mibBuilder.loadTexts: latOfferedServiceTable.setStatus('mandatory')
latOfferedServiceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 33, 3, 24, 1), ).setIndexNames((0, "XYPLEX-LAT1-MIB", "latOfferedServiceName"))
if mibBuilder.loadTexts: latOfferedServiceEntry.setStatus('mandatory')
latOfferedServiceName = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 24, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latOfferedServiceName.setStatus('mandatory')
# Row status with default 'valid' (note the second .clone('valid') sets the default value).
latOfferedServiceStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 24, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("invalid", 1), ("valid", 2))).clone('valid')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latOfferedServiceStatus.setStatus('mandatory')
latOfferedServiceAllowConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 24, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latOfferedServiceAllowConnections.setStatus('mandatory')
latOfferedServiceIdentification = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 24, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latOfferedServiceIdentification.setStatus('mandatory')
latOfferedServicePassword = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 24, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latOfferedServicePassword.setStatus('mandatory')
latOfferedServicePortMap = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 24, 1, 6), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latOfferedServicePortMap.setStatus('mandatory')
latOfferedServiceQueuing = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 24, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latOfferedServiceQueuing.setStatus('mandatory')
# --- latVisibleServiceTable (lat.25): read-only view of services seen on the network ---
latVisibleServiceTable = MibTable((1, 3, 6, 1, 4, 1, 33, 3, 25), )
if mibBuilder.loadTexts: latVisibleServiceTable.setStatus('mandatory')
latVisibleServiceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 33, 3, 25, 1), ).setIndexNames((0, "XYPLEX-LAT1-MIB", "latVisibleServiceName"))
if mibBuilder.loadTexts: latVisibleServiceEntry.setStatus('mandatory')
latVisibleServiceName = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 25, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: latVisibleServiceName.setStatus('mandatory')
latVisibleServiceStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 25, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("available", 1), ("unavailable", 2), ("unknown", 3), ("unreachable", 4), ("reachable", 5), ("connected", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: latVisibleServiceStatus.setStatus('mandatory')
latVisibleServiceNode = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 25, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: latVisibleServiceNode.setStatus('mandatory')
latVisibleServiceConnectedSessions = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 25, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latVisibleServiceConnectedSessions.setStatus('mandatory')
latVisibleServiceIdentification = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 25, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: latVisibleServiceIdentification.setStatus('mandatory')
latVisibleServiceRating = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 25, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latVisibleServiceRating.setStatus('mandatory')
# --- latNodeTable (lat.26): per-remote-node state and counters, indexed by latNodeName ---
latNodeTable = MibTable((1, 3, 6, 1, 4, 1, 33, 3, 26), )
if mibBuilder.loadTexts: latNodeTable.setStatus('mandatory')
latNodeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 33, 3, 26, 1), ).setIndexNames((0, "XYPLEX-LAT1-MIB", "latNodeName"))
if mibBuilder.loadTexts: latNodeEntry.setStatus('mandatory')
latNodeName = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeName.setStatus('mandatory')
latNodeStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("available", 1), ("unavailable", 2), ("unknown", 3), ("unreachable", 4), ("reachable", 5), ("connected", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeStatus.setStatus('mandatory')
latNodeConnectedSessions = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeConnectedSessions.setStatus('mandatory')
# 6-octet fixed-length string: the node's MAC-layer address.
latNodeAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeAddress.setStatus('mandatory')
latNodeDataLinkFrame = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeDataLinkFrame.setStatus('mandatory')
latNodeIdentification = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeIdentification.setStatus('mandatory')
latNodeGroups = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(32, 32)).setFixedLength(32)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latNodeGroups.setStatus('mandatory')
latNodeServiceNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeServiceNumber.setStatus('mandatory')
# Write 'execute' to reset this node's counters; latNodeZeroTime records when.
latNodeZero = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ready", 1), ("execute", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: latNodeZero.setStatus('mandatory')
latNodeZeroTime = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 10), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeZeroTime.setStatus('mandatory')
# Per-node traffic and error counters (columns 11..23).
latNodeInMessages = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeInMessages.setStatus('mandatory')
latNodeOutMessages = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeOutMessages.setStatus('mandatory')
latNodeInSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeInSlots.setStatus('mandatory')
latNodeOutSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeOutSlots.setStatus('mandatory')
latNodeInBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeInBytes.setStatus('mandatory')
latNodeOutBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeOutBytes.setStatus('mandatory')
latNodeAddressChange = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeAddressChange.setStatus('mandatory')
latNodeInDuplicates = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeInDuplicates.setStatus('mandatory')
latNodeOutRetransmits = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeOutRetransmits.setStatus('mandatory')
latNodeInBadMessages = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeInBadMessages.setStatus('mandatory')
latNodeInBadSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeInBadSlots.setStatus('mandatory')
latNodeInSessionsAccepted = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeInSessionsAccepted.setStatus('mandatory')
latNodeInSessionsRejected = MibTableColumn((1, 3, 6, 1, 4, 1, 33, 3, 26, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latNodeInSessionsRejected.setStatus('mandatory')
# Register every managed object with the MIB builder so other modules can import them by name.
mibBuilder.exportSymbols("XYPLEX-LAT1-MIB", latNodeLimit=latNodeLimit, latOfferedServiceStatus=latOfferedServiceStatus, latInBadSlots=latInBadSlots, latOfferedServiceIdentification=latOfferedServiceIdentification, latMulticastTimer=latMulticastTimer, latOfferedServiceEntry=latOfferedServiceEntry, latVisibleServiceTable=latVisibleServiceTable, latNodeStatus=latNodeStatus, xyplex=xyplex, latOfferedServiceQueuing=latOfferedServiceQueuing, latVisibleServiceConnectedSessions=latVisibleServiceConnectedSessions, latPortCurrentGroups=latPortCurrentGroups, latOutMessages=latOutMessages, latNodeInBytes=latNodeInBytes, latVisibleServiceRating=latVisibleServiceRating, latInBadMessages=latInBadMessages, lat=lat, latGroupPurge=latGroupPurge, latNodeZero=latNodeZero, latKeepaliveTimer=latKeepaliveTimer, latInMessages=latInMessages, latInSessionsRejected=latInSessionsRejected, latNodeAddressChange=latNodeAddressChange, latCircuitTimer=latCircuitTimer, latNodeOutRetransmits=latNodeOutRetransmits, latRetransmitLimit=latRetransmitLimit, latOfferedServiceAllowConnections=latOfferedServiceAllowConnections, latOfferedServicePortMap=latOfferedServicePortMap, latVisibleServiceNode=latVisibleServiceNode, latAnnounceServices=latAnnounceServices, latNodeZeroTime=latNodeZeroTime, latNodeDataLinkFrame=latNodeDataLinkFrame, latNodeTable=latNodeTable, latVisibleServiceStatus=latVisibleServiceStatus, latNodeConnectedSessions=latNodeConnectedSessions, latNodeInSessionsRejected=latNodeInSessionsRejected, latNodeInBadSlots=latNodeInBadSlots, latOfferedServiceName=latOfferedServiceName, latNodeEntry=latNodeEntry, latNodeOutSlots=latNodeOutSlots, latInSessionsAccepted=latInSessionsAccepted, latVisibleServiceName=latVisibleServiceName, latNodePurge=latNodePurge, latNodeOutMessages=latNodeOutMessages, latOfferedServiceTable=latOfferedServiceTable, latInBadMulticasts=latInBadMulticasts, latNodeInMessages=latNodeInMessages, latNodeInSlots=latNodeInSlots, latPortTable=latPortTable, 
latOfferedServicePassword=latOfferedServicePassword, latNodeGroups=latNodeGroups, latPortAutoPrompt=latPortAutoPrompt, latLocalServiceGroups=latLocalServiceGroups, latNodeServiceNumber=latNodeServiceNumber, latPortEntry=latPortEntry, latPortRemoteModification=latPortRemoteModification, latIdentificationLengthLimit=latIdentificationLengthLimit, latNumber=latNumber, latVisibleServiceIdentification=latVisibleServiceIdentification, latNodeIdentification=latNodeIdentification, latNodeOutBytes=latNodeOutBytes, latNodeInBadMessages=latNodeInBadMessages, latInDuplicates=latInDuplicates, latNodeName=latNodeName, latVisibleServiceEntry=latVisibleServiceEntry, latAddressChange=latAddressChange, latPortIndex=latPortIndex, latNodeAddress=latNodeAddress, latNodeInDuplicates=latNodeInDuplicates, latNodeInSessionsAccepted=latNodeInSessionsAccepted, latPortAuthorizedGroups=latPortAuthorizedGroups, latNodesRejected=latNodesRejected, latOutRetransmits=latOutRetransmits)
| [
"[email protected]"
] | |
a39a00acac47914e717411524682266198077482 | 7fb51ae4163aeea47d0fb434f28666ea99b104af | /app.py | 2cb0275c32bef3070e1b21c6218a864f8431cfd1 | [] | no_license | knowsuchagency/cdk-hello-apigw-asgi | 153eaae8d01a14e5886315122613c462ea90de70 | a47cdc58ddd9bb070419d4fbcfa1cf07fb3873f9 | refs/heads/master | 2022-12-28T15:44:05.585842 | 2020-10-18T18:17:15 | 2020-10-18T18:17:15 | 301,259,143 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 764 | py | #!/usr/bin/env python3
from aws_cdk import core
from hello_apig_wsgi.hello_apig_wsgi_stack import HelloApigWsgiStack
from hello_apig_wsgi.pipeline_stack import PipelineStack
from pydantic import BaseSettings
class Config(BaseSettings):
    """Deployment settings for the CDK app.

    Built on pydantic ``BaseSettings``, so each field can be overridden
    via an environment variable of the same name
    (https://pydantic-docs.helpmanual.io/usage/settings/).
    """

    # AWS account/region the stacks deploy to
    account: str = "385504394431"
    region: str = "us-east-2"
    # GitHub source repository for the pipeline
    gh_username: str = "knowsuchagency"
    gh_repo: str = "cdk-hello-apigw-asgi"
if __name__ == "__main__":
    # Load settings (environment variables take precedence over the defaults).
    config = Config()
    app = core.App()
    # Application stack: the API Gateway + ASGI app itself.
    application_stack = HelloApigWsgiStack(app, "application")
    # Pipeline stack: CI/CD pipeline that deploys from the GitHub repo in `config`.
    pipeline_stack = PipelineStack(
        app,
        "pipeline",
        config,
        env={"account": config.account, "region": config.region},
    )
    # Emit the CloudFormation templates for both stacks.
    app.synth()
| [
"[email protected]"
] | |
31c03c46273a3ec99f7d4ec05e1b47a219fe961a | 291c08a11a29ce995099f775ac0ef79cd69dd1fc | /file_app/migrations/0001_initial.py | 3918065b948c8b8a81a7a5331b098db45406b028 | [
"MIT"
] | permissive | Amirsorouri00/neolej | 1e278a2216a961b8abedc32b30d4fccf5c431d0b | 8fa18f2c1a38b0a59ed7eeeed7ed37ef7b9dad97 | refs/heads/master | 2020-04-20T15:36:24.669991 | 2019-03-17T07:20:02 | 2019-03-17T07:20:02 | 168,935,557 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 557 | py | # Generated by Django 2.1.3 on 2019-02-16 15:28
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the ``File`` model.

    Do not edit by hand beyond comments; Django tracks this file's
    operations in the migration history.
    """

    # First migration of this app, so no dependencies.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='File',
            fields=[
                # Implicit auto-increment primary key.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('remark', models.CharField(max_length=31)),
                # Set once when the row is created.
                ('timestamp', models.DateTimeField(auto_now_add=True)),
            ],
        ),
    ]
| [
"[email protected]"
] | |
95ee6d9028cb4c1c7c5a614b96db2580eee8344c | e859d4604615e4ff3c6730554b12ae7b09e86286 | /django-stubs/db/models/fields/files.pyi | bb53d5944104eade0990047b3af0abafb3dbaff7 | [
"BSD-3-Clause"
] | permissive | microblag/django-stubs | d91655c346279424cf5e57b80a0b104dceb86ddc | d0eb05832551d344f06ec3e83cb850866a4d37c2 | refs/heads/master | 2020-04-18T05:18:24.887114 | 2019-02-06T04:02:28 | 2019-02-06T04:02:28 | 167,273,694 | 0 | 0 | null | 2019-01-24T00:12:42 | 2019-01-24T00:12:42 | null | UTF-8 | Python | false | false | 2,954 | pyi | from typing import Any, Callable, List, Optional, Type, Union
from django.core.checks.messages import Error
from django.core.files.base import File
from django.core.files.images import ImageFile
from django.core.files.storage import FileSystemStorage, Storage
from django.db.models.base import Model
from django.db.models.fields import Field
from django.forms import fields as form_fields
class FieldFile(File):
    """Type stub for the file-like value stored on a model's FileField."""
    instance: Model = ...
    field: FileField = ...
    storage: FileSystemStorage = ...
    def __init__(self, instance: Model, field: FileField, name: Optional[str]) -> None: ...
    file: Any = ...
    @property
    def path(self) -> str: ...
    @property
    def url(self) -> str: ...
    @property
    def size(self) -> int: ...
    def save(self, name: str, content: File, save: bool = ...) -> None: ...
    def delete(self, save: bool = ...) -> None: ...
    @property
    def closed(self) -> bool: ...
class FileDescriptor:
    """Stub for the descriptor FileField installs on model classes."""
    field: FileField = ...
    def __init__(self, field: FileField) -> None: ...
    def __get__(self, instance: Optional[Model], cls: Type[Model] = ...) -> Union[FieldFile, FileDescriptor]: ...
    def __set__(self, instance: Model, value: Optional[Any]) -> None: ...
class FileField(Field):
    """Stub for django.db.models.FileField."""
    attr_class: Any = ...
    descriptor_class: Any = ...
    description: Any = ...
    storage: Any = ...
    upload_to: Any = ...
    def __init__(
        self,
        verbose_name: Optional[str] = ...,
        name: Optional[str] = ...,
        upload_to: Union[Callable, str] = ...,
        storage: Optional[Storage] = ...,
        **kwargs: Any
    ) -> None: ...
    def check(self, **kwargs: Any) -> List[Error]: ...
    def deconstruct(self) -> Any: ...
    def get_internal_type(self) -> str: ...
    def get_prep_value(self, value: Union[FieldFile, str]) -> str: ...
    def pre_save(self, model_instance: Model, add: bool) -> FieldFile: ...
    def generate_filename(self, instance: Optional[Model], filename: str) -> str: ...
    def save_form_data(self, instance: Model, data: Optional[Union[bool, File, str]]) -> None: ...
    def formfield(self, **kwargs: Any) -> form_fields.FileField: ...
class ImageFileDescriptor(FileDescriptor):
    """Stub for the descriptor installed by ImageField."""
    field: ImageField
    def __set__(self, instance: Model, value: Optional[str]) -> None: ...
class ImageFieldFile(ImageFile, FieldFile):
    """Stub for the file value stored on an ImageField."""
    field: ImageField
    def delete(self, save: bool = ...) -> None: ...
class ImageField(FileField):
    """Stub for django.db.models.ImageField."""
    def __init__(
        self,
        verbose_name: Optional[str] = ...,
        name: Optional[str] = ...,
        width_field: Optional[str] = ...,
        height_field: Optional[str] = ...,
        **kwargs: Any
    ) -> None: ...
    def check(self, **kwargs: Any) -> List[Any]: ...
    def deconstruct(self) -> Any: ...
    def update_dimension_fields(self, instance: Model, force: bool = ..., *args: Any, **kwargs: Any) -> None: ...
    def formfield(self, **kwargs: Any) -> form_fields.ImageField: ...
| [
"[email protected]"
] | |
93445be0fe7f2304b57849fd393fb87152e4fed1 | 95230c76a9e09d518c125ea8105002a7af6d1afc | /05_qstyle_sheets/style_sheets_example.py | 48aab6b37dbed01f2b7497c75912ca16b2631c56 | [] | no_license | amkartheek/nuke_python | d5f86f5ccb9742cd65acaf571fd4f5c7ca4032ff | 67ed5e25796506c9321f487f576bc142842e0041 | refs/heads/master | 2020-05-31T19:04:19.463232 | 2018-03-09T19:17:19 | 2018-03-09T19:17:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,088 | py | from PySide.QtGui import *
from PySide.QtCore import *
import sys
class MyLineEdit(QLineEdit):
    # Placeholder QLineEdit subclass; currently adds no behaviour.
    # NOTE(review): not instantiated anywhere in this file -- presumably kept so
    # style.txt can target it with a type selector; confirm before removing.
    def __init__(self):
        super(MyLineEdit, self).__init__()
class Panel(QWidget):
    """Simple user-entry form demonstrating Qt style sheets.

    The widget loads its QSS from ``style.txt`` and re-applies it whenever
    the first-name field's dynamic ``valid`` property changes, so the style
    sheet can react to that property.
    """

    def __init__(self):
        super(Panel, self).__init__()

        # --- name row -------------------------------------------------
        first_name_label = QLabel("First Name:")
        self.first_name = QLineEdit()
        # Dynamic property consumed by selectors in style.txt.
        self.first_name.setProperty("valid", False)
        self.first_name.setObjectName("first_name")
        last_name_label = QLabel("Last Name:")
        last_name = QLineEdit()
        name_layout = QHBoxLayout()
        name_layout.addWidget(first_name_label)
        name_layout.addWidget(self.first_name)
        name_layout.addWidget(last_name_label)
        name_layout.addWidget(last_name)

        # --- role row -------------------------------------------------
        role_label = QLabel("Role")
        role_combobox = QComboBox()
        role_combobox.addItems(["Pipeline TD", "Compositor", "FX TD", "Modeler", "Animator", "Lighting TD"])
        role_layout = QHBoxLayout()
        role_layout.addWidget(role_label)
        role_layout.addWidget(role_combobox)
        role_layout.addStretch()

        # --- gender row (checkboxes kept mutually exclusive via set_checkbox) ---
        self.gender_male_checkbox = QCheckBox("male")
        self.gender_famale_checbox = QCheckBox("famale")
        gender_layout = QHBoxLayout()
        gender_layout.addWidget(self.gender_male_checkbox)
        gender_layout.addWidget(self.gender_famale_checbox)
        gender_layout.addStretch()

        # --- country list ---------------------------------------------
        list_widget = QListWidget()
        list_widget.addItems(["Canada", "USA", "Japan", "London", "Australia"])
        # list_widget.setAlternatingRowColors(True)

        # --- action buttons -------------------------------------------
        save_push_button = QPushButton("OK")
        close_pusu_button = QPushButton("Close")
        action_layout = QHBoxLayout()
        action_layout.addWidget(save_push_button)
        action_layout.addWidget(close_pusu_button)

        master_layout = QVBoxLayout()
        master_layout.addLayout(name_layout)
        master_layout.addLayout(role_layout)
        master_layout.addLayout(gender_layout)
        master_layout.addWidget(list_widget)
        master_layout.addLayout(action_layout)
        self.setLayout(master_layout)

        # Signals
        close_pusu_button.clicked.connect(self.close)
        save_push_button.clicked.connect(self.show_message_box)
        self.gender_male_checkbox.clicked.connect(self.set_checkbox)
        self.gender_famale_checbox.clicked.connect(self.set_checkbox)
        self.first_name.textChanged.connect(self.check_validity)

        self.set_style_sheet()

    def check_validity(self, text):
        """Mark the first-name field valid iff non-empty, then re-apply the style sheet."""
        self.first_name.setProperty("valid", bool(text))
        self.set_style_sheet()

    def set_style_sheet(self):
        """(Re)load the style sheet from ``style.txt`` and apply it.

        Fix: the file handle is now closed deterministically via a context
        manager; the previous ``open(...).read()`` leaked the handle.
        """
        with open("style.txt") as style_file:
            text = style_file.read()
        self.setStyleSheet(text)

    def set_checkbox(self):
        """Emulate radio-button behaviour: only the clicked gender box stays checked."""
        self.gender_famale_checbox.setChecked(self.sender() is self.gender_famale_checbox)
        self.gender_male_checkbox.setChecked(self.sender() is self.gender_male_checkbox)

    def show_message_box(self):
        """Confirm a successful save to the user."""
        QMessageBox.information(self, "information", "User saved successfully!")
# Standard Qt bootstrap: create the application, show the panel, enter the event loop.
app = QApplication(sys.argv)
panel = Panel()
panel.show()
app.exec_()
| [
"[email protected]"
] | |
bc72cc0f0343ca37bc40790a466c5e2c0b09be43 | 2f46c6463d4f871a72d4296c3dae00f029e892f1 | /src/cogent3/maths/stats/jackknife.py | 33192edc584ffa4dc6506935473a1e778893a7bd | [
"BSD-3-Clause"
] | permissive | BrendanBeaton/cogent3 | a09376c55f24da837690219157770ad94e917579 | e10f4f933921d52b000096b7c016190a1602add6 | refs/heads/master | 2022-12-02T07:59:11.112306 | 2020-06-30T05:40:33 | 2020-06-30T05:40:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,782 | py | import numpy as np
from cogent3.util.table import Table
# Module authorship/version metadata (cogent3 project convention).
__author__ = "Anuj Pahwa, Gavin Huttley"
__copyright__ = "Copyright 2007-2020, The Cogent Project"
__credits__ = ["Anuj Pahwa", "Gavin Huttley"]
__license__ = "BSD-3"
__version__ = "2020.6.30a"
__maintainer__ = "Gavin Huttley"
__email__ = "[email protected]"
__status__ = "Production"
def index_gen(length):
    """Return a leave-one-out index generator for a data set of ``length``.

    The returned callable maps an index ``i`` to the list of indices
    ``0 .. length-1`` with ``i`` removed.
    """

    def gen(excluded):
        return [idx for idx in range(length) if idx != excluded]

    return gen
class JackknifeStats:
    """Computes the jackknife statistic for a particular statistical function
    as outlined by 'Tukey's Jackknife Method', Biometry by Sokal/Rohlf.

    The statistic is supplied as a callback that operates on a list of
    dataset indices, so the data itself never has to be passed to this class.
    All results are computed lazily on first access and cached.
    """

    def __init__(self, length, calc_stat, gen_index=None):
        """
        Parameters
        ----------
        length : int
            The length of the data set (since data is not passed to this
            class).
        calc_stat : callable
            A callback function that computes the required statistic of a
            defined dataset, given a list of indices into that dataset.
        gen_index : callable, optional
            A callback taking ``length`` and returning a function that maps
            sub-sample number ``i`` to the list of indices for that
            sub-sample. Defaults to :func:`index_gen` (leave-one-out); the
            default is resolved at call time rather than import time.
        """
        super().__init__()
        self.n = length
        self.calc_stat = calc_stat
        if gen_index is None:
            gen_index = index_gen
        self.gen_index = gen_index(self.n)
        # Lazily computed results; populated by jackknife().
        self._subset_statistics = None
        self._pseudovalues = None
        self._jackknifed_stat = None
        self._sample_statistic = None
        self._standard_error = None

    def jackknife(self):
        """Computes the jackknife statistics and standard error"""
        n = self.n
        n_minus_1 = n - 1

        # compute the statistic in question on the whole data set
        self._sample_statistic = self.calc_stat(list(range(self.n)))
        n_sample_statistic = n * self._sample_statistic

        # compute the jackknife statistic for the data by removing an element
        # in each iteration and computing the statistic.
        subset_statistics = []
        pseudovalues = []
        for index in range(self.n):
            stat = self.calc_stat(self.gen_index(index))
            subset_statistics.append(stat)
            # Tukey pseudovalue: n*T - (n-1)*T_(-i)
            pseudovalues.append(n_sample_statistic - n_minus_1 * stat)

        self._pseudovalues = np.array(pseudovalues)
        self._subset_statistics = np.array(subset_statistics)
        self._jackknifed_stat = self._pseudovalues.mean(axis=0)

        # Compute the approximate standard error of the jackknifed estimate
        # of the statistic
        variance = np.square(self._pseudovalues - self._jackknifed_stat).sum(axis=0)
        variance_norm = np.divide(variance, n * n_minus_1)
        self._standard_error = np.sqrt(variance_norm)

    @property
    def sample_stat(self):
        # Statistic computed over the complete data set.
        if self._sample_statistic is None:
            self.jackknife()
        return self._sample_statistic

    @property
    def jackknifed_stat(self):
        # Mean of the pseudovalues (the bias-corrected estimate).
        if self._jackknifed_stat is None:
            self.jackknife()
        return self._jackknifed_stat

    @property
    def standard_error(self):
        # Approximate standard error of the jackknifed estimate.
        if self._standard_error is None:
            self.jackknife()
        return self._standard_error

    def _per_index_table(self, values, title, label):
        """Build a Table with one row per sub-sample from ``values``.

        ``values`` has shape (n,) for scalar statistics or (n, k) for
        vector-valued statistics; ``label`` names the statistic column(s),
        matching the original 'Stat-i' / 'Stat_<k>-i' header scheme.
        """
        scalar = values.ndim == 1
        rows = []
        for index in range(self.n):
            entry = values[index]
            if scalar:
                rows.append([index, entry])
            else:
                rows.append([index] + list(entry))
        if scalar:
            header = ["i", "%s-i" % label]
        else:
            header = ["i"] + ["%s_%s-i" % (label, k) for k in range(len(values[0]))]
        return Table(data=rows, header=header, title=title)

    @property
    def sub_sample_stats(self):
        """Return a table of the sub-sample statistics"""
        # if the statistics haven't been run yet.
        if self._subset_statistics is None:
            self.jackknife()
        return self._per_index_table(self._subset_statistics, "Subsample Stats", "Stat")

    @property
    def pseudovalues(self):
        """Return a table of the Pseudovalues"""
        # if the statistics haven't been run yet.
        if self._pseudovalues is None:
            self.jackknife()
        return self._per_index_table(self._pseudovalues, "Pseudovalues", "Pseudovalue")

    @property
    def summary_stats(self):
        """Return a summary table with the statistic value(s) calculated for the
        the full data-set, the jackknife statistics and standard errors."""
        # if the statistics haven't been run yet.
        if self._jackknifed_stat is None:
            self.jackknife()
        header = ["Sample Stat", "Jackknife Stat", "Standard Error"]
        title = "Summary Statistics"
        rows = np.vstack(
            (self._sample_statistic, self._jackknifed_stat, self._standard_error)
        )
        rows = rows.transpose()
        return Table(header=header, data=rows, title=title)
| [
"[email protected]"
] | |
e0b15df612ba3b856357439a9d6586d0186b146e | c0c4fe8f9aff2e7684fcaf10329f963873753b2a | /src/biotite/sequence/sequence.py | 1a6b8230a35cd5b6afd265692459ee224fe40473 | [
"BSD-3-Clause"
] | permissive | thomasnevolianis/biotite | 85e1b9d6a1fbb5d9f81501a8ebc617bc26388ab9 | 916371eb602cfcacb2d5356659298ef38fa01fcc | refs/heads/master | 2022-11-30T19:40:53.017368 | 2020-08-04T07:00:59 | 2020-08-04T07:00:59 | 285,375,415 | 0 | 0 | BSD-3-Clause | 2020-08-05T18:41:48 | 2020-08-05T18:41:47 | null | UTF-8 | Python | false | false | 11,010 | py | # This source code is part of the Biotite package and is distributed
# under the 3-Clause BSD License. Please see 'LICENSE.rst' for further
# information.
"""
The module contains the :class:`Sequence` superclass and :class:`GeneralSequence`.
"""
__name__ = "biotite.sequence"
__author__ = "Patrick Kunzmann"
__all__ = ["Sequence"]
import numbers
import abc
import numpy as np
from .alphabet import Alphabet, LetterAlphabet
from ..copyable import Copyable
# Exclusive upper bounds of each unsigned integer dtype: an alphabet whose
# size does not exceed _size_uintN can store its symbol codes in uintN.
# Used by Sequence._dtype() to pick the smallest sufficient code dtype.
_size_uint8 = np.iinfo(np.uint8 ).max +1
_size_uint16 = np.iinfo(np.uint16).max +1
_size_uint32 = np.iinfo(np.uint32).max +1
class Sequence(Copyable, metaclass=abc.ABCMeta):
    """
    The abstract base class for all sequence types.

    A :class:`Sequence` can be seen as a succession of symbols, that are
    elements in the allowed set of symbols, the :class:`Alphabet`.
    Internally, a :class:`Sequence` object uses a *NumPy*
    :class:`ndarray` of integers, where each integer represents a
    symbol.
    The :class:`Alphabet` of a :class:`Sequence` object is used to
    encode each symbol, that is used to create the
    :class:`Sequence`, into an integer. These integer values are called
    symbol code, the encoding of an entire sequence of symbols is
    called sequence code.

    The size of the symbol code type in the array is determined by the
    size of the :class:`Alphabet`:
    If the :class:`Alphabet` contains 256 symbols or less, one byte is
    used per array element; if the :class:`Alphabet` contains
    between 257 and 65536 symbols, two bytes are used, and so on.

    Two :class:`Sequence` objects are equal if they are instances of the
    same class, have the same :class:`Alphabet` and have equal sequence
    codes.
    Comparison with a string or list of symbols evaluates always to
    false.

    A :class:`Sequence` can be indexed by any 1-D index a
    :class:`ndarray` accepts.
    If the index is a single integer, the decoded symbol at that
    position is returned, otherwise a subsequence is returned.

    Individual symbols of the sequence can also be exchanged in indexed
    form: If the an integer is used as index, the item is treated as a
    symbol. Any other index (slice, index list, boolean mask) expects
    multiple symbols, either as list of symbols, as :class:`ndarray`
    containing a sequence code or another :class:`Sequence` instance.

    Concatenation of two sequences is achieved with the '+' operator.

    Each subclass of :class:`Sequence` needs to overwrite the abstract
    method :func:`get_alphabet()`, which specifies the alphabet the
    :class:`Sequence` uses.

    Parameters
    ----------
    sequence : iterable object, optional
        The symbol sequence, the :class:`Sequence` is initialized with.
        For alphabets containing single letter strings, this parameter
        may also be a :class`str` object.
        By default the sequence is empty.

    Attributes
    ----------
    code : ndarray
        The sequence code.
    symbols : list
        The list of symbols, represented by the sequence.
        The list is generated by decoding the sequence code, when
        this attribute is accessed. When this attribute is modified,
        the new list of symbols is encoded into the sequence code.
    alphabet : Alphabet
        The alphabet of this sequence. Cannot be set.
        Equal to `get_alphabet()`.

    Examples
    --------
    Creating a DNA sequence from string and print the symbols and the
    code:

    >>> dna_seq = NucleotideSequence("ACGTA")
    >>> print(dna_seq)
    ACGTA
    >>> print(dna_seq.code)
    [0 1 2 3 0]
    >>> print(dna_seq.symbols)
    ['A' 'C' 'G' 'T' 'A']
    >>> print(list(dna_seq))
    ['A', 'C', 'G', 'T', 'A']

    Sequence indexing:

    >>> print(dna_seq[1:3])
    CG
    >>> print(dna_seq[[0,2,4]])
    AGA
    >>> print(dna_seq[np.array([False,False,True,True,True])])
    GTA

    Sequence manipulation:

    >>> dna_copy = dna_seq.copy()
    >>> dna_copy[2] = "C"
    >>> print(dna_copy)
    ACCTA
    >>> dna_copy = dna_seq.copy()
    >>> dna_copy[0:2] = dna_copy[3:5]
    >>> print(dna_copy)
    TAGTA
    >>> dna_copy = dna_seq.copy()
    >>> dna_copy[np.array([True,False,False,False,True])] = "T"
    >>> print(dna_copy)
    TCGTT
    >>> dna_copy = dna_seq.copy()
    >>> dna_copy[1:4] = np.array([0,1,2])
    >>> print(dna_copy)
    AACGA

    Reverse sequence:

    >>> dna_seq_rev = dna_seq.reverse()
    >>> print(dna_seq_rev)
    ATGCA

    Concatenate the two sequences:

    >>> dna_seq_concat = dna_seq + dna_seq_rev
    >>> print(dna_seq_concat)
    ACGTAATGCA
    """

    def __init__(self, sequence=()):
        # Delegates to the `symbols` setter, which encodes the symbols
        # into the internal sequence code.
        self.symbols = sequence

    def copy(self, new_seq_code=None):
        """
        Copy the object.

        Parameters
        ----------
        new_seq_code : ndarray, optional
            If this parameter is set, the sequence code is set to this
            value, rather than the original sequence code.

        Returns
        -------
        copy
            A copy of this object.
        """
        # Override in order to achieve better performance,
        # in case only a subsequence is needed,
        # because not the entire sequence code is copied then
        clone = self.__copy_create__()
        if new_seq_code is None:
            clone.code = np.copy(self.code)
        else:
            clone.code = new_seq_code
        self.__copy_fill__(clone)
        return clone

    @property
    def symbols(self):
        # Decoded on the fly from the internal sequence code.
        return self.get_alphabet().decode_multiple(self.code)

    @symbols.setter
    def symbols(self, value):
        alph = self.get_alphabet()
        # Smallest unsigned integer dtype that can hold all symbol codes.
        dtype = Sequence._dtype(len(alph))
        self._seq_code = alph.encode_multiple(value, dtype)

    @property
    def code(self):
        return self._seq_code

    @code.setter
    def code(self, value):
        dtype = Sequence._dtype(len(self.get_alphabet()))
        if not isinstance(value, np.ndarray):
            raise TypeError("Sequence code must be an integer ndarray")
        # copy=False: avoid copying when the array already has the right dtype.
        self._seq_code = value.astype(dtype, copy=False)

    @property
    def alphabet(self):
        return self.get_alphabet()

    @abc.abstractmethod
    def get_alphabet(self):
        """
        Get the :class:`Alphabet` of the :class:`Sequence`.

        This method must be overwritten, when subclassing
        :class:`Sequence`.

        Returns
        -------
        alphabet : Alphabet
            :class:`Sequence` alphabet.
        """
        pass

    def reverse(self):
        """
        Reverse the :class:`Sequence`.

        Returns
        -------
        reversed : Sequence
            The reversed :class:`Sequence`.

        Examples
        --------
        >>> dna_seq = NucleotideSequence("ACGTA")
        >>> dna_seq_rev = dna_seq.reverse()
        >>> print(dna_seq_rev)
        ATGCA
        """
        reversed_code = np.flip(np.copy(self._seq_code), axis=0)
        reversed = self.copy(reversed_code)
        return reversed

    def is_valid(self):
        """
        Check, if the sequence contains a valid sequence code.

        A sequence code is valid, if at each sequence position the
        code is smaller than the size of the alphabet.

        Invalid code means that the code cannot be decoded into
        symbols. Furthermore invalid code can lead to serious
        errors in alignments, since the substitution matrix
        is indexed with an invalid index.

        Returns
        -------
        valid : bool
            True, if the sequence is valid, false otherwise.
        """
        return (self.code < len(self.get_alphabet())).all()

    def get_symbol_frequency(self):
        """
        Get the number of occurences of each symbol in the sequence.

        If a symbol does not occur in the sequence, but it is in the
        alphabet, its number of occurences is 0.

        Returns
        -------
        frequency : dict
            A dictionary containing the symbols as keys and the
            corresponding number of occurences in the sequence as
            values.
        """
        frequencies = {}
        for code, symbol in enumerate(self.get_alphabet()):
            frequencies[symbol] = len(np.nonzero((self._seq_code == code))[0])
        return frequencies

    def __getitem__(self, index):
        alph = self.get_alphabet()
        sub_seq = self._seq_code.__getitem__(index)
        # A scalar index yields a decoded symbol,
        # any other index yields a subsequence.
        if isinstance(sub_seq, np.ndarray):
            return self.copy(sub_seq)
        else:
            return alph.decode(sub_seq)

    def __setitem__(self, index, item):
        alph = self.get_alphabet()
        if isinstance(index, numbers.Integral):
            # Expect a single symbol
            code = alph.encode(item)
        else:
            # Expect multiple symbols
            if isinstance(item, Sequence):
                code = item.code
            elif isinstance(item, np.ndarray):
                code = item
            else:
                # Default: item is iterable object of symbols
                code = alph.encode_multiple(item)
        self._seq_code.__setitem__(index, code)

    def __len__(self):
        return len(self._seq_code)

    def __iter__(self):
        # Yield decoded symbols one at a time.
        alph = self.get_alphabet()
        i = 0
        while i < len(self):
            yield alph.decode(self._seq_code[i])
            i += 1

    def __eq__(self, item):
        # Equal only for the same class, same alphabet and equal code;
        # comparison with strings/lists is therefore always False.
        if not isinstance(item, type(self)):
            return False
        if self.get_alphabet() != item.get_alphabet():
            return False
        return np.array_equal(self._seq_code, item._seq_code)

    def __str__(self):
        alph = self.get_alphabet()
        if isinstance(alph, LetterAlphabet):
            # Fast path: decode directly into a byte array.
            return alph.decode_multiple(self._seq_code, as_bytes=True)\
                   .tobytes().decode("ASCII")
        else:
            return "".join(alph.decode_multiple(self._seq_code))

    def __add__(self, sequence):
        # The result uses whichever alphabet extends (is a superset of)
        # the other operand's alphabet.
        if self.get_alphabet().extends(sequence.get_alphabet()):
            new_code = np.concatenate((self._seq_code, sequence._seq_code))
            new_seq = self.copy(new_code)
            return new_seq
        elif sequence.get_alphabet().extends(self.get_alphabet()):
            new_code = np.concatenate((self._seq_code, sequence._seq_code))
            new_seq = sequence.copy(new_code)
            return new_seq
        else:
            raise ValueError("The sequences alphabets are not compatible")

    @staticmethod
    def _dtype(alphabet_size):
        # Smallest unsigned integer dtype able to represent all symbol codes
        # of an alphabet with the given size.
        if alphabet_size <= _size_uint8:
            return np.uint8
        elif alphabet_size <= _size_uint16:
            return np.uint16
        elif alphabet_size <= _size_uint32:
            return np.uint32
        else:
            return np.uint64
| [
"[email protected]"
] | |
f15ea5350f91db08607111b1b3da17afdb7e9df0 | e10a6d844a286db26ef56469e31dc8488a8c6f0e | /compositional_rl/gwob/examples/web_environment_example.py | db65accda519a7ce01ec591613e7c7d0385b57be | [
"Apache-2.0",
"CC-BY-4.0"
] | permissive | Jimmy-INL/google-research | 54ad5551f97977f01297abddbfc8a99a7900b791 | 5573d9c5822f4e866b6692769963ae819cb3f10d | refs/heads/master | 2023-04-07T19:43:54.483068 | 2023-03-24T16:27:28 | 2023-03-24T16:32:17 | 282,682,170 | 1 | 0 | Apache-2.0 | 2020-07-26T15:50:32 | 2020-07-26T15:50:31 | null | UTF-8 | Python | false | false | 6,400 | py | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example execution of a rule-based optimal policy on gminiwob shopping."""
import time
from absl import app
from absl import flags
from absl import logging
from CoDE import test_websites
from CoDE import utils
from CoDE import vocabulary_node
from CoDE import web_environment
# Command-line flags configuring where the miniwob assets live and how the
# demo is executed (headless vs. visible browser, abstract vs. concrete
# navigation).
flags.DEFINE_string("data_dep_path", None,
                    "Data dep path for local miniwob files.")
flags.DEFINE_boolean(
    "run_headless_mode", False,
    "Run in headless mode. On borg, this should always be true.")
flags.DEFINE_boolean(
    "use_conceptual", False,
    "If true, use abstract web navigation where it is assumed to known which profile field corresponds to which element."
)

FLAGS = flags.FLAGS
def run_policy_on_shopping_website():
  """Run an optimal policy on the shopping website and visualize in browser.

  Builds a generic web environment, populates it with a shopping-website
  design, then walks a hard-coded optimal sequence of element interactions,
  logging the reward accumulated along the way.
  """
  # Create a generic web environment to which we will add primitives and
  # transitions to create a shopping website. These parameters will work to
  # observe a simple policy running but they might be insufficient in a training
  # setting as observations will be converted into arrays and these parameters
  # are used to shape them. In this example, they don't have that effect.
  env = web_environment.GMiniWoBWebEnvironment(
      base_url="file://{}/".format(FLAGS.data_dep_path),
      subdomain="gminiwob.generic_website",
      profile_length=5,
      number_of_fields=5,
      use_only_profile_key=False,
      number_of_dom_elements=150,
      dom_attribute_sequence_length=5,
      keyboard_action_size=5,
      kwargs_dict={
          "headless": FLAGS.run_headless_mode,
          "threading": False
      },
      step_limit=25,
      global_vocabulary=vocabulary_node.LockedVocabulary(),
      use_conceptual=FLAGS.use_conceptual)

  # Create a shopping website design with difficulty = 3.
  website = test_websites.create_shopping_website(3)
  design = test_websites.generate_website_design_from_created_website(
      website)

  # Design the actual environment.
  env.design_environment(
      design, auto_num_pages=True)
  # Make sure raw_state=True as this will return raw observations not numpy
  # arrays.
  state = env.reset(raw_state=True)

  # Optimal sequences of elements to visit. Some might be redundant and will be
  # skipped.
  optimal_actions = [
      "group_next_p0",
      "group_username",
      "group_password",
      "group_rememberme",
      "group_captcha",
      "group_stayloggedin",
      "group_next_p1",
      "group_next_p2",
      "group_name_first",
      "group_name_last",
      "group_address_line1",
      "group_address_line2",
      "group_city",
      "group_postal_code",
      "group_state",
      "group_submit_p2",
  ]
  # Corresponding pages of these elements:
  # [0, 1, 1, 1, 1, 1, 1, 2, 3, 3, 3, 3, 3, 3, 3, 3]

  reward = 0.0
  logging.info("Utterance: %s", str(state.utterance))
  logging.info("\n\n")
  logging.info("All available primitives: %s",
               str(env.get_all_actionable_primitives()))
  logging.info("\n\n")

  # Iterate over all optimal actions. For each action, iterate over all elements
  # in the current observation. If an element matches, execute the optimal
  # action and continue.
  # Iterate over optimal actions.
  for action in optimal_actions:
    logging.info("Element at focus: %s", str(action))
    # Iterate over all elements in the current observation.
    # order_dom_elements returns an ordered list of DOM elements to make the
    # order and elements consistent.
    for i, element in enumerate(
        utils.order_dom_elements(state.dom_elements, html_id_prefix=None)):
      # If HTML if of the element matches the action, execute the action.
      if element.id == action.replace("group", "actionable"):
        logging.info("Acting on (%s)", str(element))
        logging.info("\tAttributes of the element: %s",
                     str(utils.dom_attributes(element, 5)))
        # Get the corresponding profile fields.
        # NOTE: `keys` is list-like (supports .index below).
        profile_keys = env.raw_profile.keys
        # Execute the (element index, profile field index) action on the
        # website. Environment step function accepts a single scalar action.
        # We flatten the action from a tuple to a scalar which is deflattened
        # back to a tuple in the step function.
        if action[len("group") +
                  1:] in profile_keys and not FLAGS.use_conceptual:
          logging.info("Profile: %s, Element ID: %s",
                       str(profile_keys.index(action[len("group") + 1:])),
                       str(action[len("group") + 1:]))
          # action=element_index + profile_field_index * number_of_elements
          # This is converted back into a tuple using a simple modulo
          # arithmetic.
          state, r, _, _ = env.step(
              i + profile_keys.index(action[len("group") + 1:]) *
              env.number_of_dom_elements, True)
        else:  # This is the case where we have abstract navigation problem.
          logging.info("Element ID: %s", str(action[len("group") + 1:]))
          # We don't need to convert a tuple into a scalar because in this case
          # the environment expects the index of the element.
          state, r, _, _ = env.step(i, True)
        logging.info("Current reward: %f", r)
        reward += r
        if not FLAGS.run_headless_mode:
          # wait 1 sec so that the action can be observed on the browser.
          time.sleep(1)
        break
  logging.info("Final reward: %f", reward)
  if not FLAGS.run_headless_mode:
    # wait 30 secs so that the users can inspect the html in the browser.
    time.sleep(30)
def main(argv):
  """absl entry point: rejects any extra CLI arguments, then runs the demo."""
  extra_args = argv[1:]
  if extra_args:
    raise app.UsageError("Too many command-line arguments.")
  run_policy_on_shopping_website()
if __name__ == "__main__":
  # app.run parses the absl flags and then invokes main(argv).
  app.run(main)
| [
"[email protected]"
] | |
8c5ffaaa66db4fcbb98cfd663e36037edaa8163a | abaa806550f6e6e7bcdf71b9ec23e09a85fe14fd | /data/global-configuration/packs/vmware/collectors/vmguestlib.py | eb9e2dabd67d95667afa30dc59ee76accdf5f3c7 | [
"MIT"
] | permissive | naparuba/opsbro | 02809ddfe22964cd5983c60c1325c965e8b02adf | 98618a002cd47250d21e7b877a24448fc95fec80 | refs/heads/master | 2023-04-16T08:29:31.143781 | 2019-05-15T12:56:11 | 2019-05-15T12:56:11 | 31,333,676 | 34 | 7 | null | null | null | null | UTF-8 | Python | false | false | 22,971 | py | ### This program is free software; you can redistribute it and/or
### modify it under the terms of the GNU General Public License
### as published by the Free Software Foundation; either version 2
### of the License, or (at your option) any later version.
###
### This program is distributed in the hope that it will be useful,
### but WITHOUT ANY WARRANTY; without even the implied warranty of
### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
### GNU General Public License for more details.
###
### You should have received a copy of the GNU General Public License
### along with this program; if not, write to the Free Software
### Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
### Copyright 2013-2014 Dag Wieers <[email protected]>
from ctypes import *
from ctypes.util import find_library
__author__ = 'Dag Wieers <[email protected]>'
__version__ = '0.1.2'
__version_info__ = tuple([int(d) for d in __version__.split('.')])
__license__ = 'GNU General Public License (GPL)'
# TODO: Implement support for Windows and MacOSX, improve Linux support ?
# Locate and load the VMware Guest SDK shared library; the exported name
# differs between Tools releases ('vmGuestLib' vs. 'guestlib').
if find_library('vmGuestLib'):
    vmGuestLib = CDLL(find_library('vmGuestLib'))
elif find_library('guestlib'):
    vmGuestLib = CDLL(find_library('guestlib'))
# elif os.path.exists('/usr/lib/vmware-tools/lib/libvmGuestLib.so/libvmGuestLib.so'):
#    vmGuestLib = CDLL('/usr/lib/vmware-tools/lib/libvmGuestLib.so/libvmGuestLib.so')
# elif os.path.exists('%PROGRAMFILES%\\VMware\\VMware Tools\\Guest SDK\\vmStatsProvider\win32\\vmGuestLib.dll'):
#    vmGuestLib = CDLL('%PROGRAMFILES%\\VMware\\VMware Tools\\Guest SDK\\vmStatsProvider\win32\\vmGuestLib.dll')
else:
    # Parenthesized raise so the module stays importable on both Python 2
    # and 3; the old "raise Exception, msg" form is a SyntaxError on Python 3.
    raise Exception('ERROR: Cannot find vmGuestLib library in LD_LIBRARY_PATH')
# VMGuestLibError status codes returned by every vSphere Guest API call
# (mirrors the enum in vmGuestLib.h). The two tuples below map each code
# to its symbolic name (VMErrors) and human-readable message (VMErrMsgs).
VMGUESTLIB_ERROR_SUCCESS = 0
VMGUESTLIB_ERROR_OTHER = 1
VMGUESTLIB_ERROR_NOT_RUNNING_IN_VM = 2
VMGUESTLIB_ERROR_NOT_ENABLED = 3
VMGUESTLIB_ERROR_NOT_AVAILABLE = 4
VMGUESTLIB_ERROR_NO_INFO = 5
VMGUESTLIB_ERROR_MEMORY = 6
VMGUESTLIB_ERROR_BUFFER_TOO_SMALL = 7
VMGUESTLIB_ERROR_INVALID_HANDLE = 8
VMGUESTLIB_ERROR_INVALID_ARG = 9
VMGUESTLIB_ERROR_UNSUPPORTED_VERSION = 10

# Symbolic names, indexed by error code.
VMErrors = (
    'VMGUESTLIB_ERROR_SUCCESS',
    'VMGUESTLIB_ERROR_OTHER',
    'VMGUESTLIB_ERROR_NOT_RUNNING_IN_VM',
    'VMGUESTLIB_ERROR_NOT_ENABLED',
    'VMGUESTLIB_ERROR_NOT_AVAILABLE',
    'VMGUESTLIB_ERROR_NO_INFO',
    'VMGUESTLIB_ERROR_MEMORY',
    'VMGUESTLIB_ERROR_BUFFER_TOO_SMALL',
    'VMGUESTLIB_ERROR_INVALID_HANDLE',
    'VMGUESTLIB_ERROR_INVALID_ARG',
    'VMGUESTLIB_ERROR_UNSUPPORTED_VERSION',
)

# Human-readable descriptions, indexed by error code.
VMErrMsgs = (
    'The function has completed successfully.',
    'An error has occurred. No additional information about the type of error is available.',
    'The program making this call is not running on a VMware virtual machine.',
    'The vSphere Guest API is not enabled on this host, so these functions cannot be used. For information about how to enable the library, see "Context Functions" on page 9.',
    'The information requested is not available on this host.',
    'The handle data structure does not contain any information. You must call VMGuestLib_UpdateInfo to update the data structure.',
    'There is not enough memory available to complete the call.',
    'The buffer is too small to accommodate the function call. For example, when you call VMGuestLib_GetResourcePoolPath, if the path buffer is too small for the resulting resource pool path, the function returns this error. To resolve this error, allocate a larger buffer.',
    'The handle that you used is invalid. Make sure that you have the correct handle and that it is open. It might be necessary to create a new handle using VMGuestLib_OpenHandle.',
    'One or more of the arguments passed to the function were invalid.',
    'The host does not support the requested statistic.',
)
class VMGuestLibException(Exception):
    '''Raised when a vSphere Guest API call returns a non-success
    VMGuestLibError code. Carries the numeric code (`errno`), the
    library-provided error text (`message`) and the matching description
    from VMErrMsgs (`strerr`).'''

    def __init__(self, errno):
        error_text_fn = vmGuestLib.VMGuestLib_GetErrorText
        error_text_fn.restype = c_char_p
        self.errno = errno
        self.GetErrorText = error_text_fn
        self.message = error_text_fn(errno)
        self.strerr = VMErrMsgs[errno]

    def __str__(self):
        return '%s\n%s' % (self.message, self.strerr)
class VMGuestLib(Structure):
    def __init__(self):
        """Open a guest library handle, refresh its data and record the
        current session ID."""
        # Reference to virtualmachinedata. VMGuestLibHandle is defined in vmGuestLib.h.
        self.handle = self.OpenHandle()
        self.UpdateInfo()

        # Unique identifier for a session. The session ID changes after a virtual machine is
        # migrated using VMotion, suspended and resumed, or reverted to a snapshot. Any of
        # these events is likely to render any information retrieved with this API invalid. You
        # can use the session ID to detect those events and react accordingly. For example, you
        # can refresh and reset any state that relies on the validity of previously retrieved
        # information.
        # Use VMGuestLib_GetSessionId to obtain a valid session ID. A session ID is
        # opaque. You cannot compare a virtual machine session ID with the session IDs from
        # any other virtual machines. You must always call VMGuestLib_GetSessionId after
        # calling VMGuestLib_UpdateInfo.
        # VMSessionID is defined in vmSessionId.h
        self.sid = self.GetSessionId()
    def OpenHandle(self):
        '''Gets a handle for use with other vSphere Guest API functions. The guest library
        handle provides a context for accessing information about the virtual machine.
        Virtual machine statistics and state data are associated with a particular guest library
        handle, so using one handle does not affect the data associated with another handle.'''
        # Reuse the cached handle if one was already opened for this instance.
        if hasattr(self, 'handle'):
            return self.handle
        else:
            handle = c_void_p()
            ret = vmGuestLib.VMGuestLib_OpenHandle(byref(handle))
            if ret != VMGUESTLIB_ERROR_SUCCESS:
                raise VMGuestLibException(ret)
            return handle

    def CloseHandle(self):
        '''Releases a handle acquired with VMGuestLib_OpenHandle'''
        if hasattr(self, 'handle'):
            ret = vmGuestLib.VMGuestLib_CloseHandle(self.handle.value)
            if ret != VMGUESTLIB_ERROR_SUCCESS:
                raise VMGuestLibException(ret)
            # Drop the cached handle so a later OpenHandle() creates a new one.
            del (self.handle)

    def UpdateInfo(self):
        '''Updates information about the virtual machine. This information is associated with
        the VMGuestLibHandle.
        VMGuestLib_UpdateInfo requires similar CPU resources to a system call and
        therefore can affect performance. If you are concerned about performance, minimize
        the number of calls to VMGuestLib_UpdateInfo.
        If your program uses multiple threads, each thread must use a different handle.
        Otherwise, you must implement a locking scheme around update calls. The vSphere
        Guest API does not implement internal locking around access with a handle.'''
        ret = vmGuestLib.VMGuestLib_UpdateInfo(self.handle.value)
        if ret != VMGUESTLIB_ERROR_SUCCESS:
            raise VMGuestLibException(ret)

    def GetSessionId(self):
        '''Retrieves the VMSessionID for the current session. Call this function after calling
        VMGuestLib_UpdateInfo. If VMGuestLib_UpdateInfo has never been called,
        VMGuestLib_GetSessionId returns VMGUESTLIB_ERROR_NO_INFO.'''
        sid = c_void_p()
        ret = vmGuestLib.VMGuestLib_GetSessionId(self.handle.value, byref(sid))
        if ret != VMGUESTLIB_ERROR_SUCCESS:
            raise VMGuestLibException(ret)
        return sid
def GetCpuLimitMHz(self):
'''Retrieves the upperlimit of processor use in MHz available to the virtual
machine. For information about setting the CPU limit, see "Limits and
Reservations" on page 14.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetCpuLimitMHz(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetCpuReservationMHz(self):
'''Retrieves the minimum processing power in MHz reserved for the virtual
machine. For information about setting a CPU reservation, see "Limits and
Reservations" on page 14.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetCpuReservationMHz(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetCpuShares(self):
'''Retrieves the number of CPU shares allocated to the virtual machine. For
information about how an ESX server uses CPU shares to manage virtual
machine priority, see the vSphere Resource Management Guide.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetCpuShares(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetCpuStolenMs(self):
'''Retrieves the number of milliseconds that the virtual machine was in a
ready state (able to transition to a run state), but was not scheduled to run.'''
counter = c_uint64()
ret = vmGuestLib.VMGuestLib_GetCpuStolenMs(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetCpuUsedMs(self):
'''Retrieves the number of milliseconds during which the virtual machine
has used the CPU. This value includes the time used by the guest
operating system and the time used by virtualization code for tasks for this
virtual machine. You can combine this value with the elapsed time
(VMGuestLib_GetElapsedMs) to estimate the effective virtual machine
CPU speed. This value is a subset of elapsedMs.'''
counter = c_uint64()
ret = vmGuestLib.VMGuestLib_GetCpuUsedMs(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetElapsedMs(self):
'''Retrieves the number of milliseconds that have passed in the virtual
machine since it last started running on the server. The count of elapsed
time restarts each time the virtual machine is powered on, resumed, or
migrated using VMotion. This value counts milliseconds, regardless of
whether the virtual machine is using processing power during that time.
You can combine this value with the CPU time used by the virtual machine
(VMGuestLib_GetCpuUsedMs) to estimate the effective virtual machine
CPU speed. cpuUsedMs is a subset of this value.'''
counter = c_uint64()
ret = vmGuestLib.VMGuestLib_GetElapsedMs(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostCpuUsedMs(self):
'''Undocumented.'''
counter = c_uint64()
ret = vmGuestLib.VMGuestLib_GetHostCpuUsedMs(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemKernOvhdMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemKernOvhdMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemMappedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemMappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemPhysFreeMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemPhysFreeMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemPhysMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemPhysMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemSharedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemSharedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemSwappedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemSwappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemUnmappedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemUnmappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemUsedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemUsedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostNumCpuCores(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostNumCpuCores(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetHostProcessorSpeed(self):
'''Retrieves the speed of the ESX system's physical CPU in MHz.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostProcessorSpeed(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemActiveMB(self):
'''Retrieves the amount of memory the virtual machine is actively using its
estimated working set size.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemActiveMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemBalloonedMB(self):
'''Retrieves the amount of memory that has been reclaimed from this virtual
machine by the vSphere memory balloon driver (also referred to as the
"vmmemctl" driver).'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemBalloonedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetMemBalloonMaxMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemBalloonMaxMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetMemBalloonTargetMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemBalloonTargetMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemLimitMB(self):
'''Retrieves the upper limit of memory that is available to the virtual
machine. For information about setting a memory limit, see "Limits and
Reservations" on page 14.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemLimitMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetMemLLSwappedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemLLSwappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemMappedMB(self):
'''Retrieves the amount of memory that is allocated to the virtual machine.
Memory that is ballooned, swapped, or has never been accessed is
excluded.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemMappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemOverheadMB(self):
'''Retrieves the amount of "overhead" memory associated with this virtual
machine that is currently consumed on the host system. Overhead
memory is additional memory that is reserved for data structures required
by the virtualization layer.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemOverheadMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemReservationMB(self):
'''Retrieves the minimum amount of memory that is reserved for the virtual
machine. For information about setting a memory reservation, see "Limits
and Reservations" on page 14.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemReservationMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemSharedMB(self):
'''Retrieves the amount of physical memory associated with this virtual
machine that is copy-on-write (COW) shared on the host.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemSharedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemSharedSavedMB(self):
'''Retrieves the estimated amount of physical memory on the host saved
from copy-on-write (COW) shared guest physical memory.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemSharedSavedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemShares(self):
'''Retrieves the number of memory shares allocated to the virtual machine.
For information about how an ESX server uses memory shares to manage
virtual machine priority, see the vSphere Resource Management Guide.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemShares(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemSwappedMB(self):
'''Retrieves the amount of memory that has been reclaimed from this virtual
machine by transparently swapping guest memory to disk.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemSwappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetMemSwapTargetMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemSwapTargetMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemTargetSizeMB(self):
'''Retrieves the size of the target memory allocation for this virtual machine.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemTargetSizeMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemUsedMB(self):
'''Retrieves the estimated amount of physical host memory currently
consumed for this virtual machine's physical memory.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemUsedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetMemZippedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemZippedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetMemZipSavedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemZipSavedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# vim:ts=4:sw=4:et
| [
"[email protected]"
] | |
59411623046d6332476124e04690091dcaed47f4 | 25864296fe1d059bba11e999541828ea5eadc5b9 | /DarkSUSY_mH_125/mGammaD_0275/cT_10000/DarkSUSY_LHE_read.py | 67e6e5eb47bd296666d7acc0323970e5aa374aa6 | [] | no_license | bmichlin/MuJetAnalysis_DarkSusySamples_LHE_13TeV_01 | 17965f8eddf65d24a7c3c8ab81f92c3fc21f4f58 | 1de8d11f1a2e86874cd92b9819adbad4a6780b81 | refs/heads/master | 2020-06-14T12:54:38.920627 | 2015-03-18T14:00:07 | 2015-03-18T14:00:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111,093 | py | import ROOT, array, os, re, math, random, string
from math import *
from operator import itemgetter
def getStringBetween(name, first, second):
    '''Return the substring of name between the first occurrence of first
    and the next occurrence of second AFTER it.

    The search for second starts after the end of first; the original code
    searched from the beginning of the string, so an occurrence of second
    located before first produced an empty or garbled slice.  Returns an
    empty string when either marker is absent.
    '''
    start = name.find(first)
    if start < 0:
        return ''
    start += len(first)
    end = name.find(second, start)  # search only past the first marker
    if end < 0:
        return ''
    return name[start:end]
# PDG / MadGraph particle ID codes used when scanning the LHE records.
muonID = 13
higgsID = 25
n1ID = 3000002
nDID = 3000001
# Number of events to process before stopping the event loop.
nExit = 80002
#nExit = 1000
gammaDID = 3000022
# Nominal masses (GeV) used only for plot labels.
hMass = "125"
n1Mass = "10"
nDMass = "1"
filename = "DarkSUSY_mH_125_mGammaD_0275_13TeV_cT_10000_madgraph452_bridge224_events80k.lhe"
filename = "DarkSUSY_mH_125_mGammaD_0275_13TeV_cT_10000_madgraph452_bridge224_events80k.lhe"
f = open(filename, 'r')
# Parse sample parameters (masses, lifetime, beam energy) out of the file
# name.  Long names (>= 77 chars) contain an explicit "_cT_<lifetime>"
# field; short names are prompt samples with cT = 0.
if len(filename) >= 77:
    mass_GammaD = getStringBetween(filename, "mGammaD_","_13TeV_cT")
    lifetime_GammaD = getStringBetween(filename, "_cT_","_madgraph452")
    energy = getStringBetween(filename, mass_GammaD + "_","TeV_")
    mass_Higgs = getStringBetween(filename, "_mH_","_mGammaD_")
    # Insert the decimal point: the last two digits of the lifetime field
    # are its fractional part (e.g. "10000" -> "100.00" mm).
    lifetime_GammaD_Legend = lifetime_GammaD[0:-2] + "." + lifetime_GammaD[len(lifetime_GammaD)-2:len(lifetime_GammaD)]
    mass_GammaD_Legend = mass_GammaD[0:-3] + "." + mass_GammaD[len(mass_GammaD)-3:len(lifetime_GammaD)+1]
    #mass_GammaD = filename[24:-49]
    #lifetime_GammaD = filename[38:-36]
    #energy = filename[29:-46]
    #mass_Higgs = filename[12:-62]
    #lifetime_GammaD_Legend = filename[38:-38] + "." + filename[39:-36]
    #mass_GammaD_Legend = filename [24:-52] + "." + filename[25:-49]
    # Strip trailing zeros (and a bare trailing ".") from the legend strings.
    if mass_GammaD_Legend[len(mass_GammaD_Legend)-1] == "0": mass_GammaD_Legend = mass_GammaD_Legend[:-1]
    if mass_GammaD_Legend[len(mass_GammaD_Legend)-1] == "0": mass_GammaD_Legend = mass_GammaD_Legend[:-1]
    if mass_GammaD_Legend[len(mass_GammaD_Legend)-1] == "0": mass_GammaD_Legend = mass_GammaD_Legend[:-1]
    if mass_GammaD_Legend[len(mass_GammaD_Legend)-1] == "." and len(mass_GammaD_Legend) <= 3: mass_GammaD_Legend = mass_GammaD_Legend + "0"
    switch = 0
    if lifetime_GammaD_Legend[len(lifetime_GammaD_Legend)-1] == "0":
        lifetime_GammaD_Legend = lifetime_GammaD_Legend[:-1]
        switch = 1
    if lifetime_GammaD_Legend[len(lifetime_GammaD_Legend)-1] == "0" and switch == 1: lifetime_GammaD_Legend = lifetime_GammaD_Legend[:-1]
else:
    # Prompt sample: no lifetime field in the file name.
    lifetime_GammaD = "000"
    lifetime_GammaD_Legend = "0.00"
    mass_GammaD = getStringBetween(filename, "mGammaD_","_13TeV")
    energy = getStringBetween(filename, mass_GammaD + "_","TeV")
    mass_Higgs = getStringBetween(filename, "_mH_","_mGammaD_")
    mass_GammaD_Legend = mass_GammaD[0:-3] + "." + mass_GammaD[len(mass_GammaD)-3:len(lifetime_GammaD)+1]
    #mass_GammaD = filename[24:-42]
    #energy = filename[29:-39]
    #mass_Higgs = filename[12:-55]
    #mass_GammaD_Legend = filename[24:-45] + "." + filename[25:-42]
    #lifetime_GammaD = "000"
    #lifetime_GammaD_Legend = "0.00"
# Echo the parsed parameters so the operator can sanity-check them.
print mass_GammaD
print lifetime_GammaD
print lifetime_GammaD_Legend
print mass_GammaD_Legend
# Output ROOT file that will hold all validation histograms.
BAM = ROOT.TFile("ValidationPlots_mGammaD_" + mass_GammaD + "_" + energy + "_TeV_cT_" + lifetime_GammaD + ".root" , "RECREATE")
# Apply the standard CMS TDR plotting style (sets global gStyle options).
execfile("tdrStyle.py")
cnv = ROOT.TCanvas("cnv", "cnv")
# Header legend drawn on top of every canvas.
txtHeader = ROOT.TLegend(.17,.935,0.97,1.)
txtHeader.SetFillColor(ROOT.kWhite)
txtHeader.SetFillStyle(0)
txtHeader.SetBorderSize(0)
txtHeader.SetTextFont(42)
txtHeader.SetTextSize(0.045)
txtHeader.SetTextAlign(22)
#txtHeader.SetHeader("CMS Simulation")
txtHeader.SetHeader("CMS Simulation (LHE) " + energy + " TeV")
#txtHeader.SetHeader("CMS Prelim. 2011 #sqrt{s} = 7 TeV L_{int} = 5.3 fb^{-1}")
#txtHeader.SetHeader("CMS 2011 #sqrt{s} = 7 TeV L_{int} = 5.3 fb^{-1}")
#txtHeader.SetHeader("CMS Prelim. 2012 #sqrt{s} = 8 TeV L_{int} = 20.65 fb^{-1}")
#txtHeader.SetHeader("CMS 2012 #sqrt{s} = 8 TeV L_{int} = 20.65 fb^{-1}")
txtHeader.Draw()
# Legend summarizing the decay chain and the sample parameters.
#info = ROOT.TLegend(0.33,0.8222222,0.9577778,0.9122222)
info = ROOT.TLegend(0.4566667,0.82,0.7822222,0.9066667)
info.SetFillColor(ROOT.kWhite)
info.SetFillStyle(0)
info.SetBorderSize(0)
info.SetTextFont(42)
info.SetTextSize(0.02777778)
info.SetMargin(0.13)
info.SetHeader("#splitline{pp #rightarrow h #rightarrow 2n_{1} #rightarrow 2n_{D} + 2 #gamma_{D} #rightarrow 2n_{D} + 4#mu}{#splitline{m_{h} = " + mass_Higgs + " GeV, m_{n_{1}} = 10 GeV, m_{n_{D}} = 1 GeV}{m_{#gamma_{D}} = " + mass_GammaD_Legend + " GeV, c#tau_{#gamma_{D}} = " + lifetime_GammaD_Legend + " mm}}" )
#info.SetHeader("#splitline{pp #rightarrow h #rightarrow 2n_{1} #rightarrow 2n_{D} + 2 #gamma_{D} #rightarrow 2n_{D} + 4#mu}{#splitline{#gamma_{D} c#tau = "+lifetime_GammaD_Legend + "mm, Mass = " + mass_GammaD_Legend + "GeV}{M of h = " + hMass + "GeV, M of n_{1} = " + n1Mass + "GeV, M of n_{D} = " + nDMass + "GeV}}" )
# Alternative header used on some canvases.
txtHeader2 = ROOT.TLegend(0.01333333,0.9311111,0.8133333,0.9955556)
txtHeader2.SetFillColor(ROOT.kWhite)
txtHeader2.SetFillStyle(0)
txtHeader2.SetBorderSize(0)
txtHeader2.SetTextFont(42)
txtHeader2.SetTextSize(0.045)
txtHeader2.SetTextAlign(22)
txtHeader2.SetHeader("CMS Simulation #sqrt{s} = " + energy + " TeV")
################################################################################
# pT of muons
################################################################################
# Missing transverse energy (vector pT sum of the two dark neutralinos).
# "*_dummy" histograms carry the axis styling and are drawn first; the
# non-dummy histograms hold the actual event content.
Etmiss_dummy = ROOT.TH1F("Etmiss_dummy","Etmiss_dummy", 100, 0, 100)
Etmiss_dummy.SetTitleOffset(1.5, "Y")
Etmiss_dummy.SetTitleOffset(1.4, "X")
Etmiss_dummy.SetTitleSize(0.04,"X")
Etmiss_dummy.SetXTitle("MET = #sum_{n_{D}}#vec{p_{T}} [GeV]")
Etmiss_dummy.SetYTitle("Fraction of events / 1 GeV")
Etmiss_dummy.SetMaximum( 0.1 )
Etmiss = ROOT.TH1F("Etmiss","Etmiss", 100, 0, 100)
Etmiss.SetLineColor(ROOT.kBlue)
Etmiss.SetLineWidth(2)
Etmiss.SetLineStyle(1)
nBins = 125
binMin = 0.0
binMax = 125.0
yMax = 0.25
cTlow = 0
# Lifetime-plot binning: range out to 5*ctau with one ctau per bin; for a
# prompt sample (ctau = 0) fall back to a fixed 0-10 mm / 1 mm binning.
if float(lifetime_GammaD_Legend) != 0:
    cTlim = float(lifetime_GammaD_Legend)*5
    binwidth = float(lifetime_GammaD_Legend)
    numBins = int(cTlim/binwidth)
    binwidthRound = round(binwidth,3)
else:
    cTlim = 10
    binwidth = 1
    numBins = int(cTlim/binwidth)
    binwidthRound = "1"
# Normalized exponential decay law used later as the TF1 fit/overlay formula.
formula = "exp(-x/"+ lifetime_GammaD_Legend +")/("+ lifetime_GammaD_Legend + "*(1 - exp(-" + str(cTlim) + "/" + lifetime_GammaD_Legend + ")))"
print formula
# Proper decay length (c*tau, rest frame) of both dark photons combined.
h_gammaD_cT_dummy = ROOT.TH1F("h_gammaD_cT_dummy", "h_gammaD_cT_dummy", numBins, 0, cTlim)
#h_gammaD_cT_dummy.SetYTitle("Fraction of events")
h_gammaD_cT_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_cT_dummy.SetXTitle("c#tau of #gamma_{D} [mm]")
h_gammaD_cT_dummy.SetYTitle("Normalized Fraction of Events / " + str(binwidthRound) + " mm")
h_gammaD_cT_dummy.SetTitleSize(0.05,"Y")
h_gammaD_cT_dummy.SetMaximum( 10 )
h_gammaD_cT = ROOT.TH1F("h_gammaD_cT", "h_gammaD_cT", numBins, 0, cTlim)
h_gammaD_cT.SetLineColor(ROOT.kBlue)
h_gammaD_cT.SetLineWidth(2)
h_gammaD_cT.SetLineStyle(1)
# Lab-frame decay length L (boosted), and its transverse / longitudinal
# projections L_XY and L_Z.
h_gammaD_cT_lab_dummy = ROOT.TH1F("h_gammaD_cT_lab_dummy", "h_gammaD_cT_lab_dummy", numBins, 0, cTlim)
#h_gammaD_cT_lab_dummy.SetYTitle("Fraction of events")
h_gammaD_cT_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_cT_lab_dummy.SetXTitle("L of #gamma_{D} [mm]")
h_gammaD_cT_lab_dummy.SetYTitle("Normalized Fraction of Events / " + str(binwidthRound) + " mm")
h_gammaD_cT_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_cT_lab_dummy.SetMaximum( 10 )
h_gammaD_cT_lab = ROOT.TH1F("h_gammaD_cT_lab", "h_gammaD_cT_lab", numBins, 0, cTlim)
h_gammaD_cT_lab.SetLineColor(ROOT.kBlue)
h_gammaD_cT_lab.SetLineWidth(2)
h_gammaD_cT_lab.SetLineStyle(1)
h_gammaD_cT_XY_lab_dummy = ROOT.TH1F("h_gammaD_cT_XY_lab_dummy", "h_gammaD_cT_XY_lab_dummy", numBins, 0, cTlim)
#h_gammaD_cT_XY_lab_dummy.SetYTitle("Fraction of events")
h_gammaD_cT_XY_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_cT_XY_lab_dummy.SetXTitle("L_{XY} of #gamma_{D} [mm]")
h_gammaD_cT_XY_lab_dummy.SetYTitle("Normalized Fraction of Events / " + str(binwidthRound) + " mm")
h_gammaD_cT_XY_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_cT_XY_lab_dummy.SetMaximum( 10 )
h_gammaD_cT_XY_lab = ROOT.TH1F("h_gammaD_cT_XY_lab", "h_gammaD_cT_XY_lab", numBins, 0, cTlim)
h_gammaD_cT_XY_lab.SetLineColor(ROOT.kBlue)
h_gammaD_cT_XY_lab.SetLineWidth(2)
h_gammaD_cT_XY_lab.SetLineStyle(1)
h_gammaD_cT_Z_lab_dummy = ROOT.TH1F("h_gammaD_cT_Z_lab_dummy", "h_gammaD_cT_Z_lab_dummy", numBins, 0, cTlim)
#h_gammaD_cT_Z_lab_dummy.SetYTitle("Fraction of events")
h_gammaD_cT_Z_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_cT_Z_lab_dummy.SetXTitle("L_{Z} of #gamma_{D} [mm]")
h_gammaD_cT_Z_lab_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_cT_Z_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_cT_Z_lab_dummy.SetMaximum( 10 )
h_gammaD_cT_Z_lab = ROOT.TH1F("h_gammaD_cT_Z_lab", "h_gammaD_cT_Z_lab", numBins, 0, cTlim)
h_gammaD_cT_Z_lab.SetLineColor(ROOT.kBlue)
h_gammaD_cT_Z_lab.SetLineWidth(2)
h_gammaD_cT_Z_lab.SetLineStyle(1)
# Same decay-length set, but separately for the first dark photon in the
# event ("_1_", blue) ...
h_gammaD_1_cT_dummy = ROOT.TH1F("h_gammaD_1_cT_dummy", "h_gammaD_1_cT_dummy", numBins, 0, cTlim)
h_gammaD_1_cT_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_1_cT_dummy.SetXTitle("c#tau of #gamma_{D} [mm]")
h_gammaD_1_cT_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_1_cT_dummy.SetTitleSize(0.05,"Y")
h_gammaD_1_cT_dummy.SetMaximum( 10 )
h_gammaD_1_cT = ROOT.TH1F("h_gammaD_1_cT", "h_gammaD_1_cT", numBins, 0, cTlim)
h_gammaD_1_cT.SetLineColor(ROOT.kBlue)
h_gammaD_1_cT.SetLineWidth(2)
h_gammaD_1_cT.SetLineStyle(1)
h_gammaD_1_cT_lab_dummy = ROOT.TH1F("h_gammaD_1_cT_lab_dummy", "h_gammaD_1_cT_lab_dummy", numBins, 0, cTlim)
h_gammaD_1_cT_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_1_cT_lab_dummy.SetXTitle("L of #gamma_{D} [mm]")
h_gammaD_1_cT_lab_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_1_cT_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_1_cT_lab_dummy.SetMaximum( 10 )
h_gammaD_1_cT_lab = ROOT.TH1F("h_gammaD_1_cT_lab", "h_gammaD_1_cT_lab", numBins, 0, cTlim)
h_gammaD_1_cT_lab.SetLineColor(ROOT.kBlue)
h_gammaD_1_cT_lab.SetLineWidth(2)
h_gammaD_1_cT_lab.SetLineStyle(1)
h_gammaD_1_cT_XY_lab_dummy = ROOT.TH1F("h_gammaD_1_cT_XY_lab_dummy", "h_gammaD_1_cT_XY_lab_dummy", numBins, 0, cTlim)
h_gammaD_1_cT_XY_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_1_cT_XY_lab_dummy.SetXTitle("L_{XY} of #gamma_{D} [mm]")
h_gammaD_1_cT_XY_lab_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_1_cT_XY_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_1_cT_XY_lab_dummy.SetMaximum( 10 )
h_gammaD_1_cT_XY_lab = ROOT.TH1F("h_gammaD_1_cT_XY_lab", "h_gammaD_1_cT_XY_lab", numBins, 0, cTlim)
h_gammaD_1_cT_XY_lab.SetLineColor(ROOT.kBlue)
h_gammaD_1_cT_XY_lab.SetLineWidth(2)
h_gammaD_1_cT_XY_lab.SetLineStyle(1)
h_gammaD_1_cT_Z_lab_dummy = ROOT.TH1F("h_gammaD_1_cT_Z_lab_dummy", "h_gammaD_1_cT_Z_lab_dummy", numBins, 0, cTlim)
h_gammaD_1_cT_Z_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_1_cT_Z_lab_dummy.SetXTitle("L_{Z} of #gamma_{D} [mm]")
h_gammaD_1_cT_Z_lab_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_1_cT_Z_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_1_cT_Z_lab_dummy.SetMaximum( 10 )
h_gammaD_1_cT_Z_lab = ROOT.TH1F("h_gammaD_1_cT_Z_lab", "h_gammaD_1_cT_Z_lab", numBins, 0, cTlim)
h_gammaD_1_cT_Z_lab.SetLineColor(ROOT.kBlue)
h_gammaD_1_cT_Z_lab.SetLineWidth(2)
h_gammaD_1_cT_Z_lab.SetLineStyle(1)
# ... and for the second dark photon ("_2_", red).
h_gammaD_2_cT = ROOT.TH1F("h_gammaD_2_cT", "h_gammaD_2_cT", numBins, 0, cTlim)
h_gammaD_2_cT.SetLineColor(ROOT.kRed)
h_gammaD_2_cT.SetLineWidth(2)
h_gammaD_2_cT.SetLineStyle(1)
h_gammaD_2_cT_lab = ROOT.TH1F("h_gammaD_2_cT_lab", "h_gammaD_2_cT_lab", numBins, 0, cTlim)
h_gammaD_2_cT_lab.SetLineColor(ROOT.kRed)
h_gammaD_2_cT_lab.SetLineWidth(2)
h_gammaD_2_cT_lab.SetLineStyle(1)
h_gammaD_2_cT_XY_lab = ROOT.TH1F("h_gammaD_2_cT_XY_lab", "h_gammaD_2_cT_XY_lab", numBins, 0, cTlim)
h_gammaD_2_cT_XY_lab.SetLineColor(ROOT.kRed)
h_gammaD_2_cT_XY_lab.SetLineWidth(2)
h_gammaD_2_cT_XY_lab.SetLineStyle(1)
h_gammaD_2_cT_Z_lab = ROOT.TH1F("h_gammaD_2_cT_Z_lab", "h_gammaD_2_cT_Z_lab", numBins, 0, cTlim)
h_gammaD_2_cT_Z_lab.SetLineColor(ROOT.kRed)
h_gammaD_2_cT_Z_lab.SetLineWidth(2)
h_gammaD_2_cT_Z_lab.SetLineStyle(1)
# Muon and Higgs kinematics: pT, |pZ|, eta, phi, |p|, mass.
h_muon_pT_dummy = ROOT.TH1F("h_muon_pT_dummy", "h_muon_pT_dummy", nBins, binMin, binMax)
h_muon_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_muon_pT_dummy.SetTitleOffset(1.35, "Y")
h_muon_pT_dummy.SetXTitle("p_{T} of #mu [GeV]")
h_muon_pT_dummy.SetMaximum( 0.2 )
h_higgs_pT_dummy = ROOT.TH1F("h_higgs_pT_dummy", "h_higgs_pT_dummy", 10, 0, 10)
h_higgs_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_higgs_pT_dummy.SetTitleOffset(1.35, "Y")
h_higgs_pT_dummy.SetXTitle("p_{T} of h [GeV]")
h_higgs_pT_dummy.SetMaximum( 1.1 )
h_muon_pZ_dummy = ROOT.TH1F("h_muon_pZ_dummy", "h_muon_pZ_dummy", nBins, binMin, binMax)
h_muon_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_muon_pZ_dummy.SetTitleOffset(1.35, "Y")
h_muon_pZ_dummy.SetXTitle("|p_{Z}| of #mu [GeV]")
h_muon_pZ_dummy.SetMaximum( yMax )
h_higgs_pZ_dummy = ROOT.TH1F("h_higgs_pZ_dummy", "h_higgs_pZ_dummy", 50, 0, 500)
h_higgs_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_higgs_pZ_dummy.SetTitleOffset(1.35, "Y")
h_higgs_pZ_dummy.SetXTitle("|p_{Z}| of h [GeV]")
h_higgs_pZ_dummy.SetMaximum( 0.1 )
h_muon_Eta_dummy = ROOT.TH1F("h_muon_Eta_dummy", "h_muon_Eta_dummy", 100, -5, 5)
h_muon_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_muon_Eta_dummy.SetTitleOffset(1.35, "Y")
h_muon_Eta_dummy.SetXTitle("#eta of #mu")
h_muon_Eta_dummy.SetMaximum( 0.1 )
#h_higgs_Eta_dummy = ROOT.TH1F("h_higgs_Eta_dummy", "h_higgs_Eta_dummy", 100,-5,5)
#h_higgs_Eta_dummy.SetYTitle("Fraction of events / 0.1 GeV")
#h_higgs_Eta_dummy.SetTitleOffset(1.35, "Y")
#h_higgs_Eta_dummy.SetXTitle("#eta of h [GeV]")
#h_higgs_Eta_dummy.SetMaximum( 0.1 )
h_muon_Phi_dummy = ROOT.TH1F("h_muon_Phi_dummy", "h_muon_Phi_dummy", 80,-4,4)
h_muon_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_muon_Phi_dummy.SetTitleOffset(1.35, "Y")
h_muon_Phi_dummy.SetXTitle("#phi of #mu [rad]")
h_muon_Phi_dummy.SetMaximum( 0.1 )
h_higgs_Phi_dummy = ROOT.TH1F("h_higgs_Phi_dummy", "h_higgs_Phi_dummy", 80,-4,4)
h_higgs_Phi_dummy.SetYTitle("Fraction of events")
h_higgs_Phi_dummy.SetTitleOffset(1.35, "Y")
h_higgs_Phi_dummy.SetXTitle("#phi of h [rad]")
h_higgs_Phi_dummy.SetMaximum( 1.4 )
h_higgs_p_dummy = ROOT.TH1F("h_higgs_p_dummy", "h_higgs_p_dummy", 50, 0, 500)
h_higgs_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_higgs_p_dummy.SetTitleOffset(1.35, "Y")
h_higgs_p_dummy.SetXTitle("p of h [GeV]")
h_higgs_p_dummy.SetMaximum( 0.1 )
h_higgs_M_dummy = ROOT.TH1F("h_higgs_M_dummy", "h_higgs_M_dummy", 220, 80.5, 300.5)
h_higgs_M_dummy.SetYTitle("Fraction of events / 1 GeV")
h_higgs_M_dummy.SetTitleOffset(1.35, "Y")
h_higgs_M_dummy.SetXTitle("Mass of h [GeV]")
h_higgs_M_dummy.SetLabelSize(0.03,"X")
h_higgs_M_dummy.SetMaximum( 1.5 )
h_higgs_M_dummy.SetNdivisions(10)
h_higgs_M_dummy.GetXaxis().SetMoreLogLabels()
h_higgs_p = ROOT.TH1F("h_higgs_p", "h_higgs_p", 50, 0, 500)
h_higgs_p.SetLineColor(ROOT.kBlue)
h_higgs_p.SetLineWidth(2)
h_higgs_p.SetLineStyle(1)
h_higgs_M = ROOT.TH1F("h_higgs_M", "h_higgs_M", 10, 120.5, 130.5)
h_higgs_M.SetLineColor(ROOT.kBlue)
h_higgs_M.SetLineWidth(2)
h_higgs_M.SetLineStyle(1)
h_higgs_pT = ROOT.TH1F("h_higgs_pT", "h_higgs_pT", 10, 0, 10)
h_higgs_pT.SetLineColor(ROOT.kBlue)
h_higgs_pT.SetLineWidth(2)
h_higgs_pT.SetLineStyle(1)
# Kinematics of the first neutralino n1 ("_1_", blue).
h_n1_1_pT_dummy = ROOT.TH1F("h_n1_1_pT_dummy", "h_n1_1_pT_dummy", 70, 0, 70)
h_n1_1_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_n1_1_pT_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_pT_dummy.SetXTitle("p_{T} of n_{1} [GeV]")
h_n1_1_pT_dummy.SetMaximum( yMax )
h_higgs_pZ = ROOT.TH1F("h_higgs_pZ", "h_higgs_pZ", 50, 0, 500)
h_higgs_pZ.SetLineColor(ROOT.kBlue)
h_higgs_pZ.SetLineWidth(2)
h_higgs_pZ.SetLineStyle(1)
h_n1_1_pZ_dummy = ROOT.TH1F("h_n1_1_pZ_dummy", "h_n1_1_pZ_dummy", 300, 0, 300)
h_n1_1_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_n1_1_pZ_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_pZ_dummy.SetXTitle("|p_{Z}| of n_{1} [GeV]")
h_n1_1_pZ_dummy.SetMaximum( 0.1 )
#h_higgs_Eta = ROOT.TH1F("h_higgs_Eta", "h_higgs_Eta", 50,0,5)
#h_higgs_Eta.SetLineColor(ROOT.kBlue)
#h_higgs_Eta.SetLineWidth(2)
#h_higgs_Eta.SetLineStyle(1)
h_n1_1_Eta_dummy = ROOT.TH1F("h_n1_1_Eta_dummy", "h_n1_1_Eta_dummy", 100,-5,5)
h_n1_1_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_n1_1_Eta_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_Eta_dummy.SetXTitle("#eta of n_{1}")
h_n1_1_Eta_dummy.SetMaximum( 0.1 )
h_higgs_Phi = ROOT.TH1F("h_higgs_Phi", "h_higgs_Phi", 80,-4,4)
h_higgs_Phi.SetLineColor(ROOT.kBlue)
h_higgs_Phi.SetLineWidth(2)
h_higgs_Phi.SetLineStyle(1)
h_n1_1_Phi_dummy = ROOT.TH1F("h_n1_1_Phi_dummy", "h_n1_1_Phi_dummy", 80,-4,4)
h_n1_1_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_n1_1_Phi_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_Phi_dummy.SetXTitle("#phi of n_{1} [rad]")
h_n1_1_Phi_dummy.SetMaximum( 0.05 )
h_n1_1_p_dummy = ROOT.TH1F("h_n1_1_p_dummy", "h_n1_1_p_dummy", 300, 0, 300)
h_n1_1_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_n1_1_p_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_p_dummy.SetXTitle("p of n_{1} [GeV]")
h_n1_1_p_dummy.SetMaximum( 0.1 )
h_n1_1_M_dummy = ROOT.TH1F("h_n1_1_M_dummy", "h_n1_1_M_dummy", 200, 0.05, 20.05)
h_n1_1_M_dummy.SetYTitle("Fraction of events / 0.1 GeV")
h_n1_1_M_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_M_dummy.SetXTitle("Mass of n_{1} [GeV]")
h_n1_1_M_dummy.SetMaximum( 1.6 )
h_n1_1_p = ROOT.TH1F("h_n1_1_p", "h_n1_1_p", 300, 0, 300)
h_n1_1_p.SetLineColor(ROOT.kBlue)
h_n1_1_p.SetLineWidth(2)
h_n1_1_p.SetLineStyle(1)
h_n1_1_M = ROOT.TH1F("h_n1_1_M", "h_n1_1_M", 200, 0.05, 20.05)
h_n1_1_M.SetLineColor(ROOT.kBlue)
h_n1_1_M.SetLineWidth(2)
h_n1_1_M.SetLineStyle(1)
h_n1_1_pT = ROOT.TH1F("h_n1_1_pT", "h_n1_1_pT", 70, 0, 70) #this is the peak at 60
h_n1_1_pT.SetLineColor(ROOT.kBlue)
h_n1_1_pT.SetLineWidth(2)
h_n1_1_pT.SetLineStyle(1)
h_n1_1_pZ = ROOT.TH1F("h_n1_1_pZ", "h_n1_1_pZ", 300, 0, 300)
h_n1_1_pZ.SetLineColor(ROOT.kBlue)
h_n1_1_pZ.SetLineWidth(2)
h_n1_1_pZ.SetLineStyle(1)
h_n1_1_Eta = ROOT.TH1F("h_n1_1_Eta", "h_n1_1_Eta", 100,-5,5)
h_n1_1_Eta.SetLineColor(ROOT.kBlue)
h_n1_1_Eta.SetLineWidth(2)
h_n1_1_Eta.SetLineStyle(1)
h_n1_1_Phi = ROOT.TH1F("h_n1_1_Phi", "h_n1_1_Phi", 80,-4,4)
h_n1_1_Phi.SetLineColor(ROOT.kBlue)
h_n1_1_Phi.SetLineWidth(2)
h_n1_1_Phi.SetLineStyle(1)
#h_n1_2_pT_dummy = ROOT.TH1F("h_n1_2_pT_dummy", "h_n1_2_pT_dummy", 700, 0, 70) #this is the peak at ~10GeV
#h_n1_2_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_n1_2_pT_dummy.SetTitleOffset(1.35, "Y")
#h_n1_2_pT_dummy.SetXTitle("p_{T n_{1}} [GeV]")
#h_n1_2_pT_dummy.SetMaximum( yMax )
#
#h_n1_2_p_dummy = ROOT.TH1F("h_n1_2_p_dummy", "h_n1_2_p_dummy", 20, 50, 70)
#h_n1_2_p_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_n1_2_p_dummy.SetTitleOffset(1.35, "Y")
#h_n1_2_p_dummy.SetXTitle("p_{n_{1}} [GeV]")
#h_n1_2_p_dummy.SetMaximum( 0.05 )
#
#h_n1_2_M_dummy = ROOT.TH1F("h_n1_2_M_dummy", "h_n1_2_M_dummy", 200, 0, 20)
#h_n1_2_M_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_n1_2_M_dummy.SetTitleOffset(1.35, "Y")
#h_n1_2_M_dummy.SetXTitle("m_{n_{1}} [GeV]")
#h_n1_2_M_dummy.SetMaximum( 1.2 )
h_n1_2_p = ROOT.TH1F("h_n1_2_p", "h_n1_2_p", 300, 0, 300)
h_n1_2_p.SetLineColor(ROOT.kRed)
h_n1_2_p.SetLineWidth(2)
h_n1_2_p.SetLineStyle(1)
#h_n1_2_M = ROOT.TH1F("h_n1_2_M", "h_n1_2_M", 200, 0.05, 20.05)
#h_n1_2_M.SetLineColor(ROOT.kRed)
#h_n1_2_M.SetLineWidth(2)
#h_n1_2_M.SetLineStyle(1)
h_n1_2_pT = ROOT.TH1F("h_n1_2_pT", "h_n1_2_pT", 70, 0, 70)
h_n1_2_pT.SetLineColor(ROOT.kRed)
h_n1_2_pT.SetLineWidth(2)
h_n1_2_pT.SetLineStyle(1)
h_nD_1_pT_dummy = ROOT.TH1F("h_nD_1_pT_dummy", "h_nD_1_pT_dummy", 130, 0, 130)
h_nD_1_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_nD_1_pT_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_pT_dummy.SetXTitle("p_{T} of n_{D} [GeV]")
h_nD_1_pT_dummy.SetMaximum( 0.1 )
h_n1_2_pZ = ROOT.TH1F("h_n1_2_pZ", "h_n1_2_pZ", 300, 0, 300)
h_n1_2_pZ.SetLineColor(ROOT.kRed)
h_n1_2_pZ.SetLineWidth(2)
h_n1_2_pZ.SetLineStyle(1)
h_nD_1_pZ_dummy = ROOT.TH1F("h_nD_1_pZ_dummy", "h_nD_1_pZ_dummy", 130, 0, 130)
h_nD_1_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_nD_1_pZ_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_pZ_dummy.SetXTitle("|p_{Z}| of n_{D} [GeV]")
h_nD_1_pZ_dummy.SetMaximum( 0.1 )
h_n1_2_Eta = ROOT.TH1F("h_n1_2_Eta", "h_n1_2_Eta", 100,-5,5)
h_n1_2_Eta.SetLineColor(ROOT.kRed)
h_n1_2_Eta.SetLineWidth(2)
h_n1_2_Eta.SetLineStyle(1)
h_nD_1_Eta_dummy = ROOT.TH1F("h_nD_1_Eta_dummy", "h_nD_1_Eta_dummy", 100,-5,5)
h_nD_1_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_nD_1_Eta_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_Eta_dummy.SetXTitle("#eta of n_{D}")
h_nD_1_Eta_dummy.SetMaximum( 0.1 )
h_n1_2_Phi = ROOT.TH1F("h_n1_2_Phi", "h_n1_2_Phi", 80,-4,4)
h_n1_2_Phi.SetLineColor(ROOT.kRed)
h_n1_2_Phi.SetLineWidth(2)
h_n1_2_Phi.SetLineStyle(1)
h_nD_1_Phi_dummy = ROOT.TH1F("h_nD_1_Phi_dummy", "h_nD_1_Phi_dummy", 80,-4,4)
h_nD_1_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_nD_1_Phi_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_Phi_dummy.SetXTitle("#phi of n_{D} [rad]")
h_nD_1_Phi_dummy.SetMaximum( 0.05 )
h_nD_1_p_dummy = ROOT.TH1F("h_nD_1_p_dummy", "h_nD_1_p_dummy", 130, 0, 130)
h_nD_1_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_nD_1_p_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_p_dummy.SetXTitle("p of n_{D} [GeV]")
h_nD_1_p_dummy.SetMaximum( 0.1 )
h_nD_1_M_dummy = ROOT.TH1F("h_nD_1_M_dummy", "h_nD_1_M_dummy", 20, 0.05, 2.05)
h_nD_1_M_dummy.SetYTitle("Fraction of events / 0.1 GeV")
h_nD_1_M_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_M_dummy.SetXTitle("Mass of n_{D} [GeV]")
h_nD_1_M_dummy.SetMaximum( 1.6 )
h_nD_1_p = ROOT.TH1F("h_nD_1_p", "h_nD_1_p", 130, 0, 130)
h_nD_1_p.SetLineColor(ROOT.kBlue)
h_nD_1_p.SetLineWidth(2)
h_nD_1_p.SetLineStyle(1)
h_nD_1_M = ROOT.TH1F("h_nD_1_M", "h_nD_1_M", 20, 0.05, 2.05)
h_nD_1_M.SetLineColor(ROOT.kBlue)
h_nD_1_M.SetLineWidth(2)
h_nD_1_M.SetLineStyle(1)
h_nD_1_pT = ROOT.TH1F("h_nD_1_pT", "h_nD_1_pT", 130, 0, 130)
h_nD_1_pT.SetLineColor(ROOT.kBlue)
h_nD_1_pT.SetLineWidth(2)
h_nD_1_pT.SetLineStyle(1)
h_nD_1_pZ = ROOT.TH1F("h_nD_1_pZ", "h_nD_1_pZ", 130, 0, 130)
h_nD_1_pZ.SetLineColor(ROOT.kBlue)
h_nD_1_pZ.SetLineWidth(2)
h_nD_1_pZ.SetLineStyle(1)
h_nD_1_Eta = ROOT.TH1F("h_nD_1_Eta", "h_nD_1_Eta", 100,-5,5)
h_nD_1_Eta.SetLineColor(ROOT.kBlue)
h_nD_1_Eta.SetLineWidth(2)
h_nD_1_Eta.SetLineStyle(1)
h_nD_1_Phi = ROOT.TH1F("h_nD_1_Phi", "h_nD_1_Phi", 80,-4,4)
h_nD_1_Phi.SetLineColor(ROOT.kBlue)
h_nD_1_Phi.SetLineWidth(2)
h_nD_1_Phi.SetLineStyle(1)
#h_nD_2_pT_dummy = ROOT.TH1F("h_nD_2_pT_dummy", "h_nD_2_pT_dummy", 100, 0, 100)
#h_nD_2_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_nD_2_pT_dummy.SetTitleOffset(1.35, "Y")
#h_nD_2_pT_dummy.SetXTitle("p_{T nD_2} [GeV]")
#h_nD_2_pT_dummy.SetMaximum( 0.01 )
#
#h_nD_2_p_dummy = ROOT.TH1F("h_nD_2_p_dummy", "h_nD_2_p_dummy", 100, 0, 100)
#h_nD_2_p_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_nD_2_p_dummy.SetTitleOffset(1.35, "Y")
#h_nD_2_p_dummy.SetXTitle("p_{nD_2} [GeV]")
#h_nD_2_p_dummy.SetMaximum( 0.01 )
#
#h_nD_2_M_dummy = ROOT.TH1F("h_nD_2_M_dummy", "h_nD_2_M_dummy", 20, 0, 2)
#h_nD_2_M_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_nD_2_M_dummy.SetTitleOffset(1.35, "Y")
#h_nD_2_M_dummy.SetXTitle("m_{nD_2} [GeV]")
#h_nD_2_M_dummy.SetMaximum( 1.2 )
# Subleading (2nd-highest-pT) dark neutralino n_D, drawn in red.  Its mass
# entries go into h_nD_1_M in the fill code, so no separate h_nD_2_M exists.
h_nD_2_p = ROOT.TH1F("h_nD_2_p", "h_nD_2_p", 130, 0, 130)
h_nD_2_p.SetLineColor(ROOT.kRed)
h_nD_2_p.SetLineWidth(2)
h_nD_2_p.SetLineStyle(1)
#h_nD_2_M = ROOT.TH1F("h_nD_2_M", "h_nD_2_M", 20, 0.05, 2.05)
#h_nD_2_M.SetLineColor(ROOT.kRed)
#h_nD_2_M.SetLineWidth(2)
#h_nD_2_M.SetLineStyle(1)
h_nD_2_pT = ROOT.TH1F("h_nD_2_pT", "h_nD_2_pT", 130, 0, 130)
h_nD_2_pT.SetLineColor(ROOT.kRed)
h_nD_2_pT.SetLineWidth(2)
h_nD_2_pT.SetLineStyle(1)
# Dark-photon (gamma_D) axis-frame ("dummy") histograms, interleaved with the
# remaining subleading-n_D kinematic histograms (red).
h_gammaD_1_pT_dummy = ROOT.TH1F("h_gammaD_1_pT_dummy", "h_gammaD_1_pT_dummy", 100, 0, 100)
h_gammaD_1_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_gammaD_1_pT_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_pT_dummy.SetXTitle("p_{T} of #gamma_{D} [GeV]")
h_gammaD_1_pT_dummy.SetMaximum( 0.1 )
h_nD_2_pZ = ROOT.TH1F("h_nD_2_pZ", "h_nD_2_pZ", 130, 0, 130)
h_nD_2_pZ.SetLineColor(ROOT.kRed)
h_nD_2_pZ.SetLineWidth(2)
h_nD_2_pZ.SetLineStyle(1)
h_gammaD_1_pZ_dummy = ROOT.TH1F("h_gammaD_1_pZ_dummy", "h_gammaD_1_pZ_dummy", 100, 0, 100)
h_gammaD_1_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_gammaD_1_pZ_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_pZ_dummy.SetXTitle("|p_{Z}| of #gamma_{D} [GeV]")
h_gammaD_1_pZ_dummy.SetMaximum( 0.1 )
h_nD_2_Eta = ROOT.TH1F("h_nD_2_Eta", "h_nD_2_Eta", 100,-5,5)
h_nD_2_Eta.SetLineColor(ROOT.kRed)
h_nD_2_Eta.SetLineWidth(2)
h_nD_2_Eta.SetLineStyle(1)
h_gammaD_1_Eta_dummy = ROOT.TH1F("h_gammaD_1_Eta_dummy", "h_gammaD_1_Eta_dummy",100,-5,5)
h_gammaD_1_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_gammaD_1_Eta_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_Eta_dummy.SetXTitle("#eta of #gamma_{D}")
h_gammaD_1_Eta_dummy.SetMaximum( 0.1 )
h_nD_2_Phi = ROOT.TH1F("h_nD_2_Phi", "h_nD_2_Phi", 80,-4,4)
h_nD_2_Phi.SetLineColor(ROOT.kRed)
h_nD_2_Phi.SetLineWidth(2)
h_nD_2_Phi.SetLineStyle(1)
h_gammaD_1_Phi_dummy = ROOT.TH1F("h_gammaD_1_Phi_dummy", "h_gammaD_1_Phi_dummy",80,-4,4 )
h_gammaD_1_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_gammaD_1_Phi_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_Phi_dummy.SetXTitle("#phi of #gamma_{D} [rad]")
h_gammaD_1_Phi_dummy.SetMaximum( 0.05 )
h_gammaD_1_p_dummy = ROOT.TH1F("h_gammaD_1_p_dummy", "h_gammaD_1_p_dummy", 100, 0, 100)
h_gammaD_1_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_gammaD_1_p_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_p_dummy.SetXTitle("p of #gamma_{D} [GeV]")
h_gammaD_1_p_dummy.SetMaximum( 0.1 )
h_gammaD_1_M_dummy = ROOT.TH1F("h_gammaD_1_M_dummy", "h_gammaD_1_M_dummy", 101, 0.1, 10.1)
h_gammaD_1_M_dummy.SetYTitle("Fraction of events / 0.1 GeV")
h_gammaD_1_M_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_M_dummy.SetXTitle("Mass of #gamma_{D} [GeV]")
h_gammaD_1_M_dummy.SetMaximum( 1.4 )
# Leading (highest-pT) dark photon gamma_D, drawn in blue.
h_gammaD_1_p = ROOT.TH1F("h_gammaD_1_p", "h_gammaD_1_p", 100, 0, 100)
h_gammaD_1_p.SetLineColor(ROOT.kBlue)
h_gammaD_1_p.SetLineWidth(2)
h_gammaD_1_p.SetLineStyle(1)
h_gammaD_1_M = ROOT.TH1F("h_gammaD_1_M", "h_gammaD_1_M", 101, 0.1, 10.1)
h_gammaD_1_M.SetLineColor(ROOT.kBlue)
h_gammaD_1_M.SetLineWidth(2)
h_gammaD_1_M.SetLineStyle(1)
h_gammaD_1_pT = ROOT.TH1F("h_gammaD_1_pT", "h_gammaD_1_pT", 100, 0, 100)
h_gammaD_1_pT.SetLineColor(ROOT.kBlue)
h_gammaD_1_pT.SetLineWidth(2)
h_gammaD_1_pT.SetLineStyle(1)
h_gammaD_1_pZ = ROOT.TH1F("h_gammaD_1_pZ", "h_gammaD_1_pZ", 100, 0, 100)
h_gammaD_1_pZ.SetLineColor(ROOT.kBlue)
h_gammaD_1_pZ.SetLineWidth(2)
h_gammaD_1_pZ.SetLineStyle(1)
h_gammaD_1_Eta = ROOT.TH1F("h_gammaD_1_Eta", "h_gammaD_1_Eta",100,-5,5)
h_gammaD_1_Eta.SetLineColor(ROOT.kBlue)
h_gammaD_1_Eta.SetLineWidth(2)
h_gammaD_1_Eta.SetLineStyle(1)
h_gammaD_1_Phi = ROOT.TH1F("h_gammaD_1_Phi", "h_gammaD_1_Phi", 80,-4,4)
h_gammaD_1_Phi.SetLineColor(ROOT.kBlue)
h_gammaD_1_Phi.SetLineWidth(2)
h_gammaD_1_Phi.SetLineStyle(1)
#h_gammaD_2_pT_dummy = ROOT.TH1F("h_gammaD_2_pT_dummy", "h_gammaD_2_pT_dummy", 100, 0, 100)
#h_gammaD_2_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_gammaD_2_pT_dummy.SetTitleOffset(1.35, "Y")
#h_gammaD_2_pT_dummy.SetXTitle("p_{T gammaD_2} [GeV]")
#h_gammaD_2_pT_dummy.SetMaximum( 0.01 )
#
#h_gammaD_2_p_dummy = ROOT.TH1F("h_gammaD_2_p_dummy", "h_gammaD_2_p_dummy", 100, 0, 100)
#h_gammaD_2_p_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_gammaD_2_p_dummy.SetTitleOffset(1.35, "Y")
#h_gammaD_2_p_dummy.SetXTitle("p_{gammaD_2} [GeV]")
#h_gammaD_2_p_dummy.SetMaximum( 0.01 )
#
#h_gammaD_2_M_dummy = ROOT.TH1F("h_gammaD_2_M_dummy", "h_gammaD_2_M_dummy", 300, 0, 3)
#h_gammaD_2_M_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_gammaD_2_M_dummy.SetTitleOffset(1.35, "Y")
#h_gammaD_2_M_dummy.SetXTitle("m_{gammaD_2} [GeV]")
#h_gammaD_2_M_dummy.SetMaximum( 1.2 )
# Subleading (2nd-highest-pT) dark photon gamma_D, drawn in red.  Its mass
# entries go into h_gammaD_1_M in the fill code, so h_gammaD_2_M stays disabled.
h_gammaD_2_p = ROOT.TH1F("h_gammaD_2_p", "h_gammaD_2_p", 100, 0, 100)
h_gammaD_2_p.SetLineColor(ROOT.kRed)
h_gammaD_2_p.SetLineWidth(2)
h_gammaD_2_p.SetLineStyle(1)
#h_gammaD_2_M = ROOT.TH1F("h_gammaD_2_M", "h_gammaD_2_M", 500, 0.005, 10.005)
#h_gammaD_2_M.SetLineColor(ROOT.kRed)
#h_gammaD_2_M.SetLineWidth(2)
#h_gammaD_2_M.SetLineStyle(1)
h_gammaD_2_pT = ROOT.TH1F("h_gammaD_2_pT", "h_gammaD_2_pT", 100, 0, 100)
h_gammaD_2_pT.SetLineColor(ROOT.kRed)
h_gammaD_2_pT.SetLineWidth(2)
h_gammaD_2_pT.SetLineStyle(1)
h_gammaD_2_pZ = ROOT.TH1F("h_gammaD_2_pZ", "h_gammaD_2_pZ", 100, 0, 100)
h_gammaD_2_pZ.SetLineColor(ROOT.kRed)
h_gammaD_2_pZ.SetLineWidth(2)
h_gammaD_2_pZ.SetLineStyle(1)
h_gammaD_2_Eta = ROOT.TH1F("h_gammaD_2_Eta", "h_gammaD_2_Eta", 100,-5,5)
h_gammaD_2_Eta.SetLineColor(ROOT.kRed)
h_gammaD_2_Eta.SetLineWidth(2)
h_gammaD_2_Eta.SetLineStyle(1)
h_gammaD_2_Phi = ROOT.TH1F("h_gammaD_2_Phi", "h_gammaD_2_Phi", 80,-4,4)
h_gammaD_2_Phi.SetLineColor(ROOT.kRed)
h_gammaD_2_Phi.SetLineWidth(2)
h_gammaD_2_Phi.SetLineStyle(1)
# Per-muon pT histograms for four muons (indices 0-3), distinguished by
# color/line style: blue/1, green/2, red/3, black/4.
# nBins/binMin/binMax here come from assignments earlier in the file
# (not visible in this chunk).
h_muon_pT_0 = ROOT.TH1F("h_muon_pT_0", "h_muon_pT_0", nBins, binMin, binMax)
h_muon_pT_0.SetLineColor(ROOT.kBlue)
h_muon_pT_0.SetLineWidth(2)
h_muon_pT_0.SetLineStyle(1)
h_muon_pT_1 = ROOT.TH1F("h_muon_pT_1", "h_muon_pT_1", nBins, binMin, binMax)
h_muon_pT_1.SetLineColor(ROOT.kGreen)
h_muon_pT_1.SetLineWidth(2)
h_muon_pT_1.SetLineStyle(2)
h_muon_pT_2 = ROOT.TH1F("h_muon_pT_2", "h_muon_pT_2", nBins, binMin, binMax)
h_muon_pT_2.SetLineColor(ROOT.kRed)
h_muon_pT_2.SetLineWidth(2)
h_muon_pT_2.SetLineStyle(3)
h_muon_pT_3 = ROOT.TH1F("h_muon_pT_3", "h_muon_pT_3", nBins, binMin, binMax)
h_muon_pT_3.SetLineColor(ROOT.kBlack)
h_muon_pT_3.SetLineWidth(2)
h_muon_pT_3.SetLineStyle(4)
# Per-muon phi histograms (axis frame + four muons, same color/style scheme).
h_muon_phi_dummy = ROOT.TH1F("h_muon_phi_dummy", "h_muon_phi_dummy", 80, -4, 4)
h_muon_phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_muon_phi_dummy.SetTitleOffset(1.35, "Y")
h_muon_phi_dummy.SetXTitle("#phi of #mu [rad]")
h_muon_phi_dummy.SetMaximum( 0.1 )
h_muon_phi_0 = ROOT.TH1F("h_muon_phi_0", "h_muon_phi_0", 80, -4, 4)
h_muon_phi_0.SetLineColor(ROOT.kBlue)
h_muon_phi_0.SetLineWidth(2)
h_muon_phi_0.SetLineStyle(1)
h_muon_phi_1 = ROOT.TH1F("h_muon_phi_1", "h_muon_phi_1", 80, -4, 4)
h_muon_phi_1.SetLineColor(ROOT.kGreen)
h_muon_phi_1.SetLineWidth(2)
h_muon_phi_1.SetLineStyle(2)
h_muon_phi_2 = ROOT.TH1F("h_muon_phi_2", "h_muon_phi_2", 80, -4, 4)
h_muon_phi_2.SetLineColor(ROOT.kRed)
h_muon_phi_2.SetLineWidth(2)
h_muon_phi_2.SetLineStyle(3)
h_muon_phi_3 = ROOT.TH1F("h_muon_phi_3", "h_muon_phi_3", 80, -4, 4)
h_muon_phi_3.SetLineColor(ROOT.kBlack)
h_muon_phi_3.SetLineWidth(2)
h_muon_phi_3.SetLineStyle(4)
# Per-muon total-momentum histograms (axis frame + muons 0-2; muon 3 follows).
h_muon_p_dummy = ROOT.TH1F("h_muon_p_dummy", "h_muon_p_dummy", 125, 0, 125)
h_muon_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_muon_p_dummy.SetTitleOffset(1.35, "Y")
h_muon_p_dummy.SetXTitle("p of #mu [GeV]")
h_muon_p_dummy.SetMaximum( 0.2 )
h_muon_p_0 = ROOT.TH1F("h_muon_p_0", "h_muon_p_0", 125, 0, 125)
h_muon_p_0.SetLineColor(ROOT.kBlue)
h_muon_p_0.SetLineWidth(2)
h_muon_p_0.SetLineStyle(1)
h_muon_p_1 = ROOT.TH1F("h_muon_p_1", "h_muon_p_1", 125, 0, 125)
h_muon_p_1.SetLineColor(ROOT.kGreen)
h_muon_p_1.SetLineWidth(2)
h_muon_p_1.SetLineStyle(2)
h_muon_p_2 = ROOT.TH1F("h_muon_p_2", "h_muon_p_2", 125, 0, 125)
h_muon_p_2.SetLineColor(ROOT.kRed)
h_muon_p_2.SetLineWidth(2)
h_muon_p_2.SetLineStyle(3)
# 4th muon momentum histogram (black, dashed) -- completes the 0-3 set above.
h_muon_p_3 = ROOT.TH1F("h_muon_p_3", "h_muon_p_3", 125, 0, 125)
h_muon_p_3.SetLineColor(ROOT.kBlack)
h_muon_p_3.SetLineWidth(2)
# Bug fix: was SetLineStyle(125), which is not a valid ROOT line style
# (predefined styles are 1-10).  Every other *_3 muon histogram in this
# file (pT, phi, eta) uses style 4, so match that pattern here.
h_muon_p_3.SetLineStyle(4)
# Per-muon |pZ| histograms (muons 0-2; muon 3 follows).
h_muon_pZ_0 = ROOT.TH1F("h_muon_pZ_0", "h_muon_pZ_0", 125, 0, 125)
h_muon_pZ_0.SetLineColor(ROOT.kBlue)
h_muon_pZ_0.SetLineWidth(2)
h_muon_pZ_0.SetLineStyle(1)
h_muon_pZ_1 = ROOT.TH1F("h_muon_pZ_1", "h_muon_pZ_1", 125, 0, 125)
h_muon_pZ_1.SetLineColor(ROOT.kGreen)
h_muon_pZ_1.SetLineWidth(2)
h_muon_pZ_1.SetLineStyle(2)
h_muon_pZ_2 = ROOT.TH1F("h_muon_pZ_2", "h_muon_pZ_2", 125, 0, 125)
h_muon_pZ_2.SetLineColor(ROOT.kRed)
h_muon_pZ_2.SetLineWidth(2)
h_muon_pZ_2.SetLineStyle(3)
# 4th muon |pZ| histogram (black, dashed) -- completes the 0-3 set above.
h_muon_pZ_3 = ROOT.TH1F("h_muon_pZ_3", "h_muon_pZ_3", 125, 0, 125)
h_muon_pZ_3.SetLineColor(ROOT.kBlack)
h_muon_pZ_3.SetLineWidth(2)
# Bug fix: was SetLineStyle(125), an invalid ROOT line style (valid
# predefined styles are 1-10); the *_3 histograms elsewhere use style 4.
h_muon_pZ_3.SetLineStyle(4)
################################################################################
# eta of muons
################################################################################
# NOTE(review): nBins/binMin/binMax/yMax are reassigned here, but only yMax is
# actually used below -- the eta histograms hard-code (100, -5, 5).
nBins = 60
binMin = -3.0
binMax = 3.0
yMax = 0.045
h_muon_eta_dummy = ROOT.TH1F("h_muon_eta_dummy", "h_muon_eta_dummy", 100, -5, 5)
h_muon_eta_dummy.SetYTitle("Fraction of events / 0.1")
h_muon_eta_dummy.GetYaxis().SetNdivisions(508);
h_muon_eta_dummy.SetTitleOffset(1.35, "Y")
h_muon_eta_dummy.SetXTitle("#eta of #mu")
h_muon_eta_dummy.SetMaximum( yMax )
h_muon_eta_0 = ROOT.TH1F("h_muon_eta_0", "h_muon_eta_0", 100,-5,5)
h_muon_eta_0.SetLineColor(ROOT.kBlue)
h_muon_eta_0.SetLineWidth(2)
h_muon_eta_0.SetLineStyle(1)
h_muon_eta_1 = ROOT.TH1F("h_muon_eta_1", "h_muon_eta_1", 100,-5,5)
h_muon_eta_1.SetLineColor(ROOT.kGreen)
h_muon_eta_1.SetLineWidth(2)
h_muon_eta_1.SetLineStyle(2)
h_muon_eta_2 = ROOT.TH1F("h_muon_eta_2", "h_muon_eta_2", 100,-5,5)
h_muon_eta_2.SetLineColor(ROOT.kRed)
h_muon_eta_2.SetLineWidth(2)
h_muon_eta_2.SetLineStyle(3)
h_muon_eta_3 = ROOT.TH1F("h_muon_eta_3", "h_muon_eta_3", 100,-5,5)
h_muon_eta_3.SetLineColor(ROOT.kBlack)
h_muon_eta_3.SetLineWidth(2)
h_muon_eta_3.SetLineStyle(4)
################################################################################
# mass of dimuons
################################################################################
# Rebinning for the dimuon-mass histograms below (used by the nBins-based
# fake-dimuon histograms further down).
nBins = 125
binMin = 0.0
binMax = 125.0
yMax = 0.4  # NOTE(review): not referenced by the live code below in this chunk
#h_dimuon_m_dummy = ROOT.TH1F("h_dimuon_m_dummy", "h_dimuon_m_dummy", nBins, binMin, binMax)
#h_dimuon_m_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_dimuon_m_dummy.GetYaxis().SetNdivisions(508);
#h_dimuon_m_dummy.SetTitleOffset(1.35, "Y")
#h_dimuon_m_dummy.SetXTitle("m_{#mu#mu} [GeV]")
#h_dimuon_m_dummy.SetMaximum( 1.2 )
#
#h_dimuon_m_0 = ROOT.TH1F("h_dimuon_m_0", "h_dimuon_m_0", nBins, binMin, binMax)
#h_dimuon_m_0.SetLineColor(ROOT.kBlue)
#h_dimuon_m_0.SetLineWidth(2)
#h_dimuon_m_0.SetLineStyle(1)
#
#h_dimuon_m_1 = ROOT.TH1F("h_dimuon_m_1", "h_dimuon_m_1", nBins, binMin, binMax)
#h_dimuon_m_1.SetLineColor(ROOT.kGreen)
#h_dimuon_m_1.SetLineWidth(2)
#h_dimuon_m_1.SetLineStyle(2)
#
#h_dimuon_m_2 = ROOT.TH1F("h_dimuon_m_2", "h_dimuon_m_2", nBins, binMin, binMax)
#h_dimuon_m_2.SetLineColor(ROOT.kRed)
#h_dimuon_m_2.SetLineWidth(2)
#h_dimuon_m_2.SetLineStyle(3)
#
#h_dimuon_m_3 = ROOT.TH1F("h_dimuon_m_3", "h_dimuon_m_3", nBins, binMin, binMax)
#h_dimuon_m_3.SetLineColor(ROOT.kBlack)
#h_dimuon_m_3.SetLineWidth(2)
#h_dimuon_m_3.SetLineStyle(4)
#
#h_dimuon_m_log_dummy = ROOT.TH1F("h_dimuon_m_log_dummy", "h_dimuon_m_log_dummy", nBins, binMin, binMax)
#h_dimuon_m_log_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_dimuon_m_log_dummy.GetYaxis().SetNdivisions(508);
#h_dimuon_m_log_dummy.SetTitleOffset(1.35, "Y")
#h_dimuon_m_log_dummy.SetXTitle("m_{#mu#mu} [GeV]")
#h_dimuon_m_log_dummy.SetMaximum( 1.2 )
#
#h_dimuon_m_log_0 = ROOT.TH1F("h_dimuon_m_log_0", "h_dimuon_m_log_0", nBins, binMin, binMax)
#h_dimuon_m_log_0.SetLineColor(ROOT.kBlue)
#h_dimuon_m_log_0.SetLineWidth(2)
#h_dimuon_m_log_0.SetLineStyle(1)
#
#h_dimuon_m_log_1 = ROOT.TH1F("h_dimuon_m_log_1", "h_dimuon_m_log_1", nBins, binMin, binMax)
#h_dimuon_m_log_1.SetLineColor(ROOT.kGreen)
#h_dimuon_m_log_1.SetLineWidth(2)
#h_dimuon_m_log_1.SetLineStyle(2)
#
#h_dimuon_m_log_2 = ROOT.TH1F("h_dimuon_m_log_2", "h_dimuon_m_log_2", nBins, binMin, binMax)
#h_dimuon_m_log_2.SetLineColor(ROOT.kRed)
#h_dimuon_m_log_2.SetLineWidth(2)
#h_dimuon_m_log_2.SetLineStyle(3)
#
#h_dimuon_m_log_3 = ROOT.TH1F("h_dimuon_m_log_3", "h_dimuon_m_log_3", nBins, binMin, binMax)
#h_dimuon_m_log_3.SetLineColor(ROOT.kBlack)
#h_dimuon_m_log_3.SetLineWidth(2)
#h_dimuon_m_log_3.SetLineStyle(4)
#
#h_dimuon_m_real_fake_dummy = ROOT.TH1F("h_dimuon_m_real_fake_dummy", "h_dimuon_m_real_fake_dummy", nBins, binMin, binMax)
#h_dimuon_m_real_fake_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_dimuon_m_real_fake_dummy.GetYaxis().SetNdivisions(508);
#h_dimuon_m_real_fake_dummy.SetTitleOffset(1.35, "Y")
#h_dimuon_m_real_fake_dummy.SetXTitle("m_{#mu#mu} [GeV]")
#h_dimuon_m_real_fake_dummy.SetMaximum( 1.2 )
#
#h_dimuon_m_real_fake_0 = ROOT.TH1F("h_dimuon_m_real_fake_0", "h_dimuon_m_real_fake_0", nBins, binMin, binMax)
#h_dimuon_m_real_fake_0.SetLineColor(ROOT.kRed)
#h_dimuon_m_real_fake_0.SetLineWidth(2)
#h_dimuon_m_real_fake_0.SetLineStyle(1)
#
#h_dimuon_m_real_fake_1 = ROOT.TH1F("h_dimuon_m_real_fake_1", "h_dimuon_m_real_fake_1", nBins, binMin, binMax)
#h_dimuon_m_real_fake_1.SetLineColor(ROOT.kBlue)
#h_dimuon_m_real_fake_1.SetLineWidth(2)
#h_dimuon_m_real_fake_1.SetLineStyle(2)
#
#h_dimuon_m_real_fake_log_dummy = ROOT.TH1F("h_dimuon_m_real_fake_log_dummy", "h_dimuon_m_real_fake_log_dummy", nBins, binMin, binMax)
#h_dimuon_m_real_fake_log_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_dimuon_m_real_fake_log_dummy.GetYaxis().SetNdivisions(508);
#h_dimuon_m_real_fake_log_dummy.SetTitleOffset(1.35, "Y")
#h_dimuon_m_real_fake_log_dummy.SetXTitle("m_{#mu#mu} [GeV]")
#h_dimuon_m_real_fake_log_dummy.SetMaximum( 1.2 )
#
#h_dimuon_m_real_fake_log_0 = ROOT.TH1F("h_dimuon_m_real_fake_log_0", "h_dimuon_m_real_fake_log_0", nBins, binMin, binMax)
#h_dimuon_m_real_fake_log_0.SetLineColor(ROOT.kRed)
#h_dimuon_m_real_fake_log_0.SetLineWidth(2)
#h_dimuon_m_real_fake_log_0.SetLineStyle(1)
#
#h_dimuon_m_real_fake_log_1 = ROOT.TH1F("h_dimuon_m_real_fake_log_1", "h_dimuon_m_real_fake_log_1", nBins, binMin, binMax)
#h_dimuon_m_real_fake_log_1.SetLineColor(ROOT.kBlue)
#h_dimuon_m_real_fake_log_1.SetLineWidth(2)
#h_dimuon_m_real_fake_log_1.SetLineStyle(2)
#########################
# Fake-dimuon invariant-mass histograms: a fine-binned (0.1 GeV) version for
# log-scale plots, and a 1 GeV-binned version using nBins/binMin/binMax above.
h_dimuon_m_fake_log_dummy = ROOT.TH1F("h_dimuon_m_fake_log_dummy", "h_dimuon_m_fake_log_dummy", 1250, 0, 125)
h_dimuon_m_fake_log_dummy.SetYTitle("Fraction of events / 0.1 GeV")
h_dimuon_m_fake_log_dummy.GetYaxis().SetNdivisions(508);
h_dimuon_m_fake_log_dummy.SetTitleOffset(1.4, "Y")
h_dimuon_m_fake_log_dummy.SetXTitle("Mass of Fake #mu#mu [GeV]")
h_dimuon_m_fake_log_dummy.SetMaximum( 1 )
h_dimuon_m_fake_log_0 = ROOT.TH1F("h_dimuon_m_fake_log_0", "h_dimuon_m_fake_log_0", 1250, 0, 125)
h_dimuon_m_fake_log_0.SetLineColor(ROOT.kRed)
h_dimuon_m_fake_log_0.SetLineWidth(2)
h_dimuon_m_fake_log_0.SetLineStyle(1)
h_dimuon_m_fake_dummy = ROOT.TH1F("h_dimuon_m_fake_dummy", "h_dimuon_m_fake_dummy", nBins, binMin, binMax)
h_dimuon_m_fake_dummy.SetYTitle("Fraction of events / 1 GeV")
h_dimuon_m_fake_dummy.GetYaxis().SetNdivisions(508);
h_dimuon_m_fake_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_m_fake_dummy.SetXTitle("Mass of Fake #mu#mu [GeV]")
h_dimuon_m_fake_dummy.SetMaximum( 1.2 )
h_dimuon_m_fake_0 = ROOT.TH1F("h_dimuon_m_fake_0", "h_dimuon_m_fake_0", nBins, binMin, binMax)
h_dimuon_m_fake_0.SetLineColor(ROOT.kRed)
h_dimuon_m_fake_0.SetLineWidth(2)
h_dimuon_m_fake_0.SetLineStyle(1)
################################################################################
# mass of 2 selected dimuons
################################################################################
# 2D scatter of the two selected dimuon masses, plus 1D mass histograms
# h_m1 (red) and h_m2 (blue, carries the axis titles).
m_min = 0.2113
m_max = 3.5536
m_bins = 66
h_m1_vs_m2 = ROOT.TH2F("h_m1_vs_m2", "h_m1_vs_m2", m_bins, m_min, m_max, m_bins, m_min, m_max)
h_m1_vs_m2.SetYTitle("m_{1#mu#mu} [GeV]")
h_m1_vs_m2.SetTitleOffset(1.3, "Y")
h_m1_vs_m2.SetXTitle("m_{2#mu#mu} [GeV]")
h_m1 = ROOT.TH1F("h_m1", "h_m1", 101, 0.1, 10.1)
h_m1.SetLineColor(ROOT.kRed)
h_m1.SetLineWidth(2)
h_m1.SetLineStyle(1)
h_m2 = ROOT.TH1F("h_m2", "h_m2", 101, 0.1, 10.1)
h_m2.SetYTitle("Events / 0.1 GeV")
h_m2.SetXTitle("m_{#mu#mu} [GeV]")
h_m2.SetTitleOffset(1.35, "Y")
h_m2.SetLineColor(ROOT.kBlue)
h_m2.SetLineWidth(2)
h_m2.SetLineStyle(1)
h_m2.SetMaximum(110000)
# Axis-frame ("dummy") histograms for the dimuon kinematic plots.
h_dimuon_1_pT_dummy = ROOT.TH1F("h_dimuon_1_pT_dummy", "h_dimuon_1_pT_dummy", 100, 0, 100)
h_dimuon_1_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_dimuon_1_pT_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_pT_dummy.SetXTitle("p_{T} of #mu#mu [GeV]")
h_dimuon_1_pT_dummy.SetMaximum( 0.1 )
h_dimuon_1_pZ_dummy = ROOT.TH1F("h_dimuon_1_pZ_dummy", "h_dimuon_1_pZ_dummy", 100, 0, 100)
h_dimuon_1_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_dimuon_1_pZ_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_pZ_dummy.SetXTitle("|p_{Z}| of #mu#mu [GeV]")
h_dimuon_1_pZ_dummy.SetMaximum( 0.1 )
h_dimuon_1_Eta_dummy = ROOT.TH1F("h_dimuon_1_Eta_dummy", "h_dimuon_1_Eta_dummy",100,-5,5)
h_dimuon_1_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_dimuon_1_Eta_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_Eta_dummy.SetXTitle("#eta of #mu#mu")
h_dimuon_1_Eta_dummy.SetMaximum( 0.1 )
h_dimuon_1_Phi_dummy = ROOT.TH1F("h_dimuon_1_Phi_dummy", "h_dimuon_1_Phi_dummy",80,-4,4 )
h_dimuon_1_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_dimuon_1_Phi_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_Phi_dummy.SetXTitle("#phi of #mu#mu [rad]")
h_dimuon_1_Phi_dummy.SetMaximum( 0.05 )
h_dimuon_1_p_dummy = ROOT.TH1F("h_dimuon_1_p_dummy", "h_dimuon_1_p_dummy", 100, 0, 100)
h_dimuon_1_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_dimuon_1_p_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_p_dummy.SetXTitle("p of #mu#mu [GeV]")
h_dimuon_1_p_dummy.SetMaximum( 0.1 )
h_dimuon_1_M_dummy = ROOT.TH1F("h_dimuon_1_M_dummy", "h_dimuon_1_M_dummy", 50, 0.5, 10.005)
h_dimuon_1_M_dummy.SetYTitle("Fraction of events / 0.2 GeV")
h_dimuon_1_M_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_M_dummy.SetXTitle("Mass of #mu#mu [GeV]")
h_dimuon_1_M_dummy.SetMaximum( 1.4 )
# First dimuon candidate, drawn in blue.
h_dimuon_1_p = ROOT.TH1F("h_dimuon_1_p", "h_dimuon_1_p", 100, 0, 100)
h_dimuon_1_p.SetLineColor(ROOT.kBlue)
h_dimuon_1_p.SetLineWidth(2)
h_dimuon_1_p.SetLineStyle(1)
h_dimuon_1_M = ROOT.TH1F("h_dimuon_1_M", "h_dimuon_1_M", 500, 0.005, 10.005)
h_dimuon_1_M.SetLineColor(ROOT.kBlue)
h_dimuon_1_M.SetLineWidth(2)
h_dimuon_1_M.SetLineStyle(1)
h_dimuon_1_pT = ROOT.TH1F("h_dimuon_1_pT", "h_dimuon_1_pT", 100, 0, 100)
h_dimuon_1_pT.SetLineColor(ROOT.kBlue)
h_dimuon_1_pT.SetLineWidth(2)
h_dimuon_1_pT.SetLineStyle(1)
h_dimuon_1_pZ = ROOT.TH1F("h_dimuon_1_pZ", "h_dimuon_1_pZ", 100, 0, 100)
h_dimuon_1_pZ.SetLineColor(ROOT.kBlue)
h_dimuon_1_pZ.SetLineWidth(2)
h_dimuon_1_pZ.SetLineStyle(1)
h_dimuon_1_Eta = ROOT.TH1F("h_dimuon_1_Eta", "h_dimuon_1_Eta",100,-5,5)
h_dimuon_1_Eta.SetLineColor(ROOT.kBlue)
h_dimuon_1_Eta.SetLineWidth(2)
h_dimuon_1_Eta.SetLineStyle(1)
h_dimuon_1_Phi = ROOT.TH1F("h_dimuon_1_Phi", "h_dimuon_1_Phi", 80,-4,4)
h_dimuon_1_Phi.SetLineColor(ROOT.kBlue)
h_dimuon_1_Phi.SetLineWidth(2)
h_dimuon_1_Phi.SetLineStyle(1)
# Second dimuon candidate, drawn in red (no separate mass histogram).
h_dimuon_2_p = ROOT.TH1F("h_dimuon_2_p", "h_dimuon_2_p", 100, 0, 100)
h_dimuon_2_p.SetLineColor(ROOT.kRed)
h_dimuon_2_p.SetLineWidth(2)
h_dimuon_2_p.SetLineStyle(1)
h_dimuon_2_pT = ROOT.TH1F("h_dimuon_2_pT", "h_dimuon_2_pT", 100, 0, 100)
h_dimuon_2_pT.SetLineColor(ROOT.kRed)
h_dimuon_2_pT.SetLineWidth(2)
h_dimuon_2_pT.SetLineStyle(1)
h_dimuon_2_pZ = ROOT.TH1F("h_dimuon_2_pZ", "h_dimuon_2_pZ", 100, 0, 100)
h_dimuon_2_pZ.SetLineColor(ROOT.kRed)
h_dimuon_2_pZ.SetLineWidth(2)
h_dimuon_2_pZ.SetLineStyle(1)
h_dimuon_2_Eta = ROOT.TH1F("h_dimuon_2_Eta", "h_dimuon_2_Eta", 100,-5,5)
h_dimuon_2_Eta.SetLineColor(ROOT.kRed)
h_dimuon_2_Eta.SetLineWidth(2)
h_dimuon_2_Eta.SetLineStyle(1)
h_dimuon_2_Phi = ROOT.TH1F("h_dimuon_2_Phi", "h_dimuon_2_Phi", 80,-4,4)
h_dimuon_2_Phi.SetLineColor(ROOT.kRed)
h_dimuon_2_Phi.SetLineWidth(2)
h_dimuon_2_Phi.SetLineStyle(1)
################################################################################
# BAM Functions
################################################################################
def plotOverflow(hist):
    # Re-bin *hist* into a histogram one bin wider (same name/title) so that
    # out-of-range content can appear on the plot, then draw it normalized on
    # the current canvas ("same").
    name = hist.GetName()
    title = hist.GetTitle()
    nx = hist.GetNbinsX()+1          # one extra bin beyond the last regular bin
    x1 = hist.GetBinLowEdge(1)
    bw = hist.GetBinWidth(nx)
    x2 = hist.GetBinLowEdge(nx)+bw   # extend the upper edge by one bin width
    htmp = ROOT.TH1F(name, title, nx, x1, x2)
    for i in range(1, nx):           # copy regular bins 1..GetNbinsX()
        htmp.Fill(htmp.GetBinCenter(i), hist.GetBinContent(i))
    # NOTE(review): this fills the UNDERFLOW content (bin 0) at the x-value
    # GetNbinsX()-1, and never copies the overflow bin (bin nx) into the extra
    # bin -- looks suspicious for a function named plotOverflow; confirm intent.
    htmp.Fill(hist.GetNbinsX()-1, hist.GetBinContent(0))
    htmp.SetEntries(hist.GetEntries())
    htmp.SetLineColor(hist.GetLineColor())
    htmp.SetLineWidth(hist.GetLineWidth())
    htmp.SetLineStyle(hist.GetLineStyle())
    htmp.DrawNormalized("same")
    return
def integral(hist):
    """Approximate the integral of *hist* as sum(bin width x bin content).

    Assumes uniform bin widths (the width is read once, from bin N+1) and
    includes the underflow bin (bin 0) through the last regular bin, exactly
    as the original accumulation loop did.
    """
    bin_width = hist.GetBinWidth(hist.GetNbinsX() + 1)
    return sum(bin_width * hist.GetBinContent(b)
               for b in range(hist.GetNbinsX() + 1))
def getEta(pz, p):
    """Return the pseudorapidity eta = atanh(pz / p) for longitudinal
    momentum *pz* and total momentum *p* (GeV)."""
    return atanh(pz / p)
def scaleAxisY(hist, dummy):
    """Set the y-axis maximum of the axis-frame histogram *dummy* so that the
    tallest bin of *hist*, after normalizing by hist.Integral(), has 1.8x
    headroom below the top of the frame."""
    peak = hist.GetBinContent(hist.GetMaximumBin()) / hist.Integral()
    dummy.SetMaximum(1.8 * peak)
def scaleAxisYcT(hist, dummy):
    """Like scaleAxisY, but normalizes the peak by the module-level
    integral() (bin-width-weighted sum) rather than hist.Integral(); used
    for the cT (lifetime) histograms."""
    peak = hist.GetBinContent(hist.GetMaximumBin()) / integral(hist)
    dummy.SetMaximum(1.8 * peak)
################################################################################
# Loop over events
################################################################################
nEvents = 0      # number of <event> blocks seen so far
isEvent = False  # True while the parser is inside an <event>...</event> block
nEventsOK = 0    # NOTE(review): never incremented in the code visible here
for line in f:
if line == '<event>\n':
isEvent = True
isEvent = True
nEvents = nEvents + 1
nLinesInEvent = 0
nParticlesInEvent = 0
muons = []
dimuons = []
DimuonIndex1 = []
DimuonIndex2 = []
bamDimuons = []
FakeIndex1 = []
FakeIndex2 = []
FakeDimuons = []
lifetimes = []
higgs = []
neutralinos = []
darkNeutralinos = []
gammaDs = []
n1PlotCounter = 0
gammaDPlotCounter = 0
nDPlotCounter = 0
if nEvents > nExit: break
continue
if line == '</event>\n':
isEvent = False
continue
if isEvent == True:
nLinesInEvent = nLinesInEvent + 1
#***************************************************************************
# first line with common event information
#***************************************************************************
if nLinesInEvent == 1:
word_n = 0
# print "I", line
for word in line.split():
word_n = word_n + 1
if word_n == 1: NUP = int(word) # number of particles in the event
if word_n == 2: IDPRUP = int(word) # process type
if word_n == 3: XWGTUP = float(word) # event weight
if word_n == 4: SCALUP = float(word) # factorization scale Q
if word_n == 5: AQEDUP = float(word) # the QED coupling alpha_em
if word_n == 6: AQCDUP = float(word) # the QCD coupling alpha_s
if word_n > 6: print "Warning! Wrong common event information", line
#***************************************************************************
# line with particle information
#***************************************************************************
if nLinesInEvent >= 2:
nParticlesInEvent = nParticlesInEvent + 1
word_n = 0
# print "P", line
for word in line.split():
word_n = word_n + 1
if word_n == 1: IDUP = int(word) # particle PDG identity code
if word_n == 2: ISTUP = int(word) # status code
if word_n == 3: MOTHUP1 = int(word) # position of the first mother of particle
if word_n == 4: MOTHUP2 = int(word) # position of the last mother of particle
if word_n == 5: ICOLUP1 = int(word) # tag for the colour flow info
if word_n == 6: ICOLUP2 = int(word) # tag for the colour flow info
if word_n == 7: PUP1 = float(word) # px in GeV
if word_n == 8: PUP2 = float(word) # py in GeV
if word_n == 9: PUP3 = float(word) # pz in GeV
if word_n == 10: PUP4 = float(word) # E in GeV
if word_n == 11: PUP5 = float(word) # m in GeV
if word_n == 12: VTIMUP = float(word) # invariant lifetime ctau in mm
if word_n == 13: SPINUP = float(word) # cosine of the angle between the spin vector of a particle and its three-momentum
if word_n > 13: print "Warning! Wrong particle line", line
if abs(IDUP) == muonID:
if IDUP > 0: q = -1
if IDUP < 0: q = 1
v4 = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
muons.append(( q, v4.Px(), v4.Py(), v4.Pz(), v4.E(), v4.M(), v4.Pt(), v4.Eta(), v4.Phi(), MOTHUP1 ))
if abs(IDUP) == higgsID:
if IDUP > 0: q = 0
if IDUP < 0: q = 0
vHiggs = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
higgs.append((q, vHiggs.Px(), vHiggs.Py(), vHiggs.Pz(), vHiggs.E(), vHiggs.M(), vHiggs.Pt(), vHiggs.Eta(), vHiggs.Phi() ))
h_higgs_pT.Fill( higgs[len(higgs)-1][6] )
h_higgs_M.Fill( higgs[len(higgs)-1][5] )
h_higgs_p.Fill( sqrt( higgs[len(higgs)-1][1]*higgs[len(higgs)-1][1] + higgs[len(higgs)-1][2]*higgs[len(higgs)-1][2] + higgs[len(higgs)-1][3]*higgs[len(higgs)-1][3] ) )
h_higgs_pZ.Fill( fabs(higgs[len(higgs)-1][3]) )
#h_higgs_Eta.Fill( higgs[len(higgs)-1][7] )
h_higgs_Phi.Fill( higgs[len(higgs)-1][8] )
if abs(IDUP) == n1ID:
q = 0
vNeutralino = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
neutralinos.append((q, vNeutralino.Px(), vNeutralino.Py(), vNeutralino.Pz(), vNeutralino.E(), vNeutralino.M(), vNeutralino.Pt(), vNeutralino.Eta(), vNeutralino.Phi() ))
if len(neutralinos) == 2 and n1PlotCounter == 0:
neutralinos_sorted_pT = sorted(neutralinos, key=itemgetter(6), reverse=True)
neutralinos = neutralinos_sorted_pT
h_n1_1_pT.Fill( neutralinos[0][6] )
h_n1_2_pT.Fill( neutralinos[1][6] )
h_n1_1_p.Fill( sqrt( neutralinos[0][1]*neutralinos[0][1] + neutralinos[0][2]*neutralinos[0][2] + neutralinos[0][3]*neutralinos[0][3] ) )
h_n1_2_p.Fill( sqrt( neutralinos[1][1]*neutralinos[1][1] + neutralinos[1][2]*neutralinos[1][2] + neutralinos[1][3]*neutralinos[1][3] ) )
h_n1_1_M.Fill( neutralinos[0][5] )
h_n1_1_M.Fill( neutralinos[1][5] )
h_n1_1_pZ.Fill( fabs(neutralinos[0][3]) )
h_n1_2_pZ.Fill( fabs(neutralinos[1][3]) )
h_n1_1_Eta.Fill( getEta(neutralinos[0][3],(sqrt( neutralinos[0][1]*neutralinos[0][1] + neutralinos[0][2]*neutralinos[0][2] + neutralinos[0][3]*neutralinos[0][3] ))) )
h_n1_1_Phi.Fill( neutralinos[0][8] )
h_n1_2_Eta.Fill( getEta(neutralinos[1][3], sqrt( neutralinos[1][1]*neutralinos[1][1] + neutralinos[1][2]*neutralinos[1][2] + neutralinos[1][3]*neutralinos[1][3] )) )
#print "PUP3, PZ, P, ETA:"
#print neutralinos[0][7]
#print neutralinos[0][3]
#print (sqrt( neutralinos[0][1]*neutralinos[0][1] + neutralinos[0][2]*neutralinos[0][2] + neutralinos[0][3]*neutralinos[0][3] ))
#print getEta(neutralinos[0][3],(sqrt( neutralinos[0][1]*neutralinos[0][1] + neutralinos[0][2]*neutralinos[0][2] + neutralinos[0][3]*neutralinos[0][3] )))
h_n1_2_Phi.Fill( neutralinos[1][8] )
n1PlotCounter = 1
if abs(IDUP) == nDID:
q = 0
vDarkNeutralino = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
darkNeutralinos.append((q, vDarkNeutralino.Px(), vDarkNeutralino.Py(), vDarkNeutralino.Pz(), vDarkNeutralino.E(), vDarkNeutralino.M(), vDarkNeutralino.Pt(), vDarkNeutralino.Eta(), vDarkNeutralino.Phi() ))
if len(darkNeutralinos) == 2 and nDPlotCounter == 0:
darkNeutralinos_sorted_pT = sorted(darkNeutralinos, key=itemgetter(6), reverse=True)
darkNeutralinos = darkNeutralinos_sorted_pT
h_nD_1_pT.Fill( darkNeutralinos[0][6] )
h_nD_2_pT.Fill( darkNeutralinos[1][6] )
h_nD_1_p.Fill( sqrt( darkNeutralinos[0][1]*darkNeutralinos[0][1] + darkNeutralinos[0][2]*darkNeutralinos[0][2] + darkNeutralinos[0][3]*darkNeutralinos[0][3] ) )
h_nD_2_p.Fill( sqrt( darkNeutralinos[1][1]*darkNeutralinos[1][1] + darkNeutralinos[1][2]*darkNeutralinos[1][2] + darkNeutralinos[1][3]*darkNeutralinos[1][3] ) )
h_nD_1_M.Fill( darkNeutralinos[0][5] )
h_nD_1_M.Fill( darkNeutralinos[1][5] )
h_nD_1_pZ.Fill( fabs(darkNeutralinos[0][3]) )
h_nD_2_pZ.Fill( fabs(darkNeutralinos[1][3]) )
h_nD_1_Eta.Fill( getEta(darkNeutralinos[0][3], sqrt( darkNeutralinos[0][1]*darkNeutralinos[0][1] + darkNeutralinos[0][2]*darkNeutralinos[0][2] + darkNeutralinos[0][3]*darkNeutralinos[0][3] )) )
h_nD_1_Phi.Fill( darkNeutralinos[0][8] )
h_nD_2_Eta.Fill( getEta(darkNeutralinos[1][3], sqrt( darkNeutralinos[1][1]*darkNeutralinos[1][1] + darkNeutralinos[1][2]*darkNeutralinos[1
][2] + darkNeutralinos[1][3]*darkNeutralinos[1][3] )) )
h_nD_2_Phi.Fill( darkNeutralinos[1][8] )
vectorSum =( ( darkNeutralinos[0][1] + darkNeutralinos[1][1] )*( darkNeutralinos[0][1] + darkNeutralinos[1][1] ) ) + ( (darkNeutralinos[0][2] + darkNeutralinos[1][2])*(darkNeutralinos[0][2] + darkNeutralinos[1][2]) )
Etmiss.Fill(vectorSum)
nDPlotCounter = 1
if abs(IDUP) == gammaDID:
q = 0
vgammaDs = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
gammaDs.append(( q, vgammaDs.Px(), vgammaDs.Py(), vgammaDs.Pz(), vgammaDs.E(), vgammaDs.M(), vgammaDs.Pt(), vgammaDs.Eta(), vgammaDs.Phi()))
h_gammaD_cT.Fill( VTIMUP )
pmom = sqrt( vgammaDs.Px()*vgammaDs.Px() + vgammaDs.Py()*vgammaDs.Py() + vgammaDs.Pz()*vgammaDs.Pz() )
beta = pmom/(sqrt(vgammaDs.M()*vgammaDs.M() + pmom*pmom ))
lorentz = 1/sqrt( 1 - beta*beta )
h_gammaD_cT_lab.Fill( lorentz*VTIMUP )
pmomxy = sqrt( vgammaDs.Px()*vgammaDs.Px() + vgammaDs.Py()*vgammaDs.Py() )
betaxy = pmomxy/sqrt( vgammaDs.M()*vgammaDs.M() + pmomxy*pmomxy )
lorentzxy = 1/sqrt(1- betaxy*betaxy)
h_gammaD_cT_XY_lab.Fill( lorentzxy*VTIMUP )
pmomz = sqrt( vgammaDs.Pz()*vgammaDs.Pz() )
betaz = pmomz/sqrt( vgammaDs.M()*vgammaDs.M() + pmomz*pmomz )
lorentzZ = 1/sqrt(1 - betaz*betaz )
h_gammaD_cT_Z_lab.Fill( lorentzZ * VTIMUP )
lifetimes.append( (VTIMUP, vgammaDs.Px(), vgammaDs.Py(), vgammaDs.Pz(), vgammaDs.Pt(), vgammaDs.M() ))
if len(gammaDs) == 2 and gammaDPlotCounter == 0:
gammaDs_sorted_pT = sorted(gammaDs, key=itemgetter(6), reverse=True)
gammaDs = gammaDs_sorted_pT
lifetimes_sorted_pT = sorted(lifetimes, key=itemgetter(4), reverse=True)
lifetimes = lifetimes_sorted_pT
h_gammaD_1_cT.Fill( lifetimes[0][0] )
pmom = sqrt( lifetimes[0][1]*lifetimes[0][1] + lifetimes[0][2]*lifetimes[0][2] + lifetimes[0][3]*lifetimes[0][3] )
beta = pmom/(sqrt(lifetimes[0][5]*lifetimes[0][5] + pmom*pmom ))
lorentz = 1/sqrt( 1 - beta*beta )
h_gammaD_1_cT_lab.Fill( lorentz*lifetimes[0][0] )
#print "pmom, beta, lorentz"
#print pmom
#print beta
#print lorentz
#print lorentz*lifetimes[0][0]
pmomxy = sqrt( lifetimes[0][1]*lifetimes[0][1] + lifetimes[0][2]*lifetimes[0][2] )
betaxy = pmomxy/sqrt( lifetimes[0][5]*lifetimes[0][5] + pmomxy*pmomxy )
lorentzxy = 1/sqrt(1- betaxy*betaxy)
h_gammaD_1_cT_XY_lab.Fill( lorentzxy*lifetimes[0][0] )
pmomz = sqrt( lifetimes[0][3]*lifetimes[0][3] )
betaz = pmomz/sqrt( lifetimes[0][5]*lifetimes[0][5] + pmomz*pmomz )
lorentzZ = 1/sqrt(1 - betaz*betaz )
h_gammaD_1_cT_Z_lab.Fill( lorentzZ * lifetimes[0][0] )
h_gammaD_2_cT.Fill( lifetimes[1][0] )
pmom = sqrt( lifetimes[1][1]*lifetimes[1][1] + lifetimes[1][2]*lifetimes[1][2] + lifetimes[1][3]*lifetimes[1][3] )
beta = pmom/(sqrt(lifetimes[1][5]*lifetimes[1][5] + pmom*pmom ))
lorentz = 1/sqrt( 1 - beta*beta )
h_gammaD_2_cT_lab.Fill( lorentz*lifetimes[1][0] )
pmomxy = sqrt( lifetimes[1][1]*lifetimes[1][1] + lifetimes[1][2]*lifetimes[1][2] )
betaxy = pmomxy/sqrt( lifetimes[1][5]*lifetimes[1][5] + pmomxy*pmomxy )
lorentzxy = 1/sqrt(1- betaxy*betaxy)
h_gammaD_2_cT_XY_lab.Fill( lorentzxy*lifetimes[1][0] )
pmomz = sqrt( lifetimes[1][3]*lifetimes[1][3] )
betaz = pmomz/sqrt( lifetimes[1][5]*lifetimes[1][5] + pmomz*pmomz )
lorentzZ = 1/sqrt(1 - betaz*betaz )
h_gammaD_2_cT_Z_lab.Fill( lorentzZ * lifetimes[1][0] )
h_gammaD_1_pT.Fill( gammaDs[0][6] )
h_gammaD_2_pT.Fill( gammaDs[1][6] )
h_gammaD_1_p.Fill( sqrt( gammaDs[0][1]*gammaDs[0][1] + gammaDs[0][2]*gammaDs[0][2] + gammaDs[0][3]*gammaDs[0][3] ) )
h_gammaD_2_p.Fill( sqrt( gammaDs[1][1]*gammaDs[1][1] + gammaDs[1][2]*gammaDs[1][2] + gammaDs[1][3]*gammaDs[1][3] ) )
h_gammaD_1_M.Fill( gammaDs[0][5] )
h_gammaD_1_M.Fill( gammaDs[1][5] )
h_gammaD_1_pZ.Fill( fabs(gammaDs[0][3]) )
h_gammaD_2_pZ.Fill( fabs(gammaDs[1][3]) )
h_gammaD_1_Eta.Fill( getEta(gammaDs[0][3], sqrt( gammaDs[0][1]*gammaDs[0][1] + gammaDs[0][2]*gammaDs[0][2] + gammaDs[0][3]*gammaDs[0][3] ) ) )
h_gammaD_1_Phi.Fill( gammaDs[0][8] )
h_gammaD_2_Eta.Fill( getEta(gammaDs[1][3], sqrt( gammaDs[1][1]*gammaDs[1][1] + gammaDs[1][2]*gammaDs[1][2] + gammaDs[1][3]*gammaDs[1][3] ) ) )
h_gammaD_2_Phi.Fill( gammaDs[1][8] )
gammaDPlotCounter = 1
if len(muons) == 4:
muons_sorted_pT = sorted(muons, key=itemgetter(6), reverse=True)
muons = muons_sorted_pT
h_muon_pT_0.Fill( muons[0][6] )
h_muon_pT_1.Fill( muons[1][6] )
h_muon_pT_2.Fill( muons[2][6] )
h_muon_pT_3.Fill( muons[3][6] )
h_muon_eta_0.Fill( muons[0][7] )
h_muon_eta_1.Fill( muons[1][7] )
h_muon_eta_2.Fill( muons[2][7] )
h_muon_eta_3.Fill( muons[3][7] )
h_muon_phi_0.Fill( muons[0][8] )
h_muon_phi_1.Fill( muons[1][8] )
h_muon_phi_2.Fill( muons[2][8] )
h_muon_phi_3.Fill( muons[3][8] )
h_muon_p_0.Fill( sqrt( muons[0][1]*muons[0][1] + muons[0][2]*muons[0][2] + muons[0][3]*muons[0][3] ) )
h_muon_p_1.Fill( sqrt( muons[1][1]*muons[1][1] + muons[1][2]*muons[1][2] + muons[1][3]*muons[1][3] ) )
h_muon_p_2.Fill( sqrt( muons[2][1]*muons[2][1] + muons[2][2]*muons[2][2] + muons[2][3]*muons[2][3] ) )
h_muon_p_3.Fill( sqrt( muons[3][1]*muons[3][1] + muons[3][2]*muons[3][2] + muons[3][3]*muons[3][3] ) )
h_muon_pZ_0.Fill( muons[0][3] )
h_muon_pZ_1.Fill( muons[1][3] )
h_muon_pZ_2.Fill( muons[2][3] )
h_muon_pZ_3.Fill( muons[3][3] )
parent = muons[1][9] #this is an arbitrary choice to find real dimuons
for i in range(0, len(muons) ):
if parent == muons[i][9]:
DimuonIndex1.append(i)
else:
DimuonIndex2.append(i)
px1 = muons[DimuonIndex1[0]][1] + muons[DimuonIndex1[1]][1]
py1 = muons[DimuonIndex1[0]][2] + muons[DimuonIndex1[1]][2]
pz1 = muons[DimuonIndex1[0]][3] + muons[DimuonIndex1[1]][3]
e1 = muons[DimuonIndex1[0]][4] + muons[DimuonIndex1[1]][4]
px2 = muons[DimuonIndex2[0]][1] + muons[DimuonIndex2[1]][1]
py2 = muons[DimuonIndex2[0]][2] + muons[DimuonIndex2[1]][2]
pz2 = muons[DimuonIndex2[0]][3] + muons[DimuonIndex2[1]][3]
e2 = muons[DimuonIndex2[0]][4] + muons[DimuonIndex2[1]][4]
bamV4_1 = ROOT.TLorentzVector(px1, py1, pz1, e1)
bamV4_2 = ROOT.TLorentzVector(px2, py2, pz2, e2)
bamDimuons.append(( bamV4_1.Px(), bamV4_1.Py(), bamV4_1.Pz(), bamV4_1.E(), bamV4_1.M(), bamV4_1.Pt(), bamV4_1.Eta(), bamV4_1.Phi() ))
bamDimuons.append(( bamV4_2.Px(), bamV4_2.Py(), bamV4_2.Pz(), bamV4_2.E(), bamV4_2.M(), bamV4_2.Pt(), bamV4_2.Eta(), bamV4_2.Phi() ))
bamDimuons_Sorted_M = sorted(bamDimuons, key=itemgetter(4), reverse=True)
bamDimuons = bamDimuons_Sorted_M
h_m1_vs_m2.Fill(bamDimuons[0][4],bamDimuons[1][4])
h_m1.Fill(bamDimuons[0][4])
h_m2.Fill(bamDimuons[1][4])
bamDimuons_Sorted_pT = sorted(bamDimuons, key=itemgetter(5), reverse=True)
bamDimuons = bamDimuons_Sorted_pT
h_dimuon_1_pT.Fill(bamDimuons[0][5])
h_dimuon_2_pT.Fill(bamDimuons[1][5])
h_dimuon_1_pZ.Fill(bamDimuons[0][2])
h_dimuon_2_pZ.Fill(bamDimuons[1][2])
h_dimuon_1_p.Fill(sqrt( bamDimuons[0][0]*bamDimuons[0][0] + bamDimuons[0][1]*bamDimuons[0][1] + bamDimuons[0][2]*bamDimuons[0][2] ))
h_dimuon_2_p.Fill(sqrt( bamDimuons[1][0]*bamDimuons[1][0] + bamDimuons[1][1]*bamDimuons[1][1] + bamDimuons[1][2]*bamDimuons[1][2] ))
h_dimuon_1_Eta.Fill(bamDimuons[0][6])
h_dimuon_2_Eta.Fill(bamDimuons[1][6])
h_dimuon_1_Phi.Fill(bamDimuons[0][7])
h_dimuon_2_Phi.Fill(bamDimuons[1][7])
parent = muons[1][9] #this is an arbitrary choice to find the fake dimuons
charge = muons[1][0]
for i in range(0, len(muons) ):
if parent != muons[i][9] and charge != muons[i][0]:
FakeIndex1.append(i)
FakeIndex1.append(1)
for j in range(0, len(muons) ):
if j != FakeIndex1[0] and j != FakeIndex1[1]:
FakeIndex2.append(j)
Fakepx1 = muons[FakeIndex1[0]][1] + muons[FakeIndex1[1]][1]
Fakepy1 = muons[FakeIndex1[0]][2] + muons[FakeIndex1[1]][2]
Fakepz1 = muons[FakeIndex1[0]][3] + muons[FakeIndex1[1]][3]
Fakee1 = muons[FakeIndex1[0]][4] + muons[FakeIndex1[1]][4]
Fakepx2 = muons[FakeIndex2[0]][1] + muons[FakeIndex2[1]][1]
Fakepy2 = muons[FakeIndex2[0]][2] + muons[FakeIndex2[1]][2]
Fakepz2 = muons[FakeIndex2[0]][3] + muons[FakeIndex2[1]][3]
Fakee2 = muons[FakeIndex2[0]][4] + muons[FakeIndex2[1]][4]
fakeV4_1 = ROOT.TLorentzVector(Fakepx1, Fakepy1, Fakepz1, Fakee1)
fakeV4_2 = ROOT.TLorentzVector(Fakepx2, Fakepy2, Fakepz2, Fakee2)
FakeDimuons.append(( fakeV4_1.Px(), fakeV4_1.Py(), fakeV4_1.Pz(), fakeV4_1.E(), fakeV4_1.M(), fakeV4_1.Pt(), fakeV4_1.Eta(), fakeV4_1.Phi() ))
FakeDimuons.append(( fakeV4_2.Px(), fakeV4_2.Py(), fakeV4_2.Pz(), fakeV4_2.E(), fakeV4_2.M(), fakeV4_2.Pt(), fakeV4_2.Eta(), fakeV4_2.Phi() ))
h_dimuon_m_fake_log_0.Fill(FakeDimuons[0][4])
h_dimuon_m_fake_log_0.Fill(FakeDimuons[1][4])
h_dimuon_m_fake_0.Fill(FakeDimuons[0][4])
h_dimuon_m_fake_0.Fill(FakeDimuons[1][4])
# is1SelMu17 = False
# for i in range(0, len(muons) ):
# if muons[i][6] >= 17. and abs(muons[i][7]) <= 0.9: is1SelMu17 = True
#
# is4SelMu8 = False
# nSelMu8 = 0
# for i in range(0, len(muons) ):
# if muons[i][6] >= 8. and abs(muons[i][7]) <= 2.4: nSelMu8 = nSelMu8 + 1
# if nSelMu8 == 4: is4SelMu8 = True
#
# if is1SelMu17 and is4SelMu8:
# for i in range(0, len(muons) ):
# for j in range(i+1, len(muons) ):
# if muons[i][0] * muons[j][0] < 0:
# px = muons[i][1] + muons[j][1]
# py = muons[i][2] + muons[j][2]
# pz = muons[i][3] + muons[j][3]
# E = muons[i][4] + muons[j][4]
# v4 = ROOT.TLorentzVector(px, py, pz, E)
# dimuons.append(( i, j, v4.Px(), v4.Py(), v4.Pz(), v4.E(), v4.M(), v4.Pt(), v4.Eta(), v4.Phi() ))
# dimuons_sorted_M = sorted(dimuons, key=itemgetter(6), reverse=True)
# dimuons = dimuons_sorted_M
# # print "Dimuons:", dimuons
# h_dimuon_m_0.Fill( dimuons[0][6] )
# h_dimuon_m_1.Fill( dimuons[1][6] )
# h_dimuon_m_2.Fill( dimuons[2][6] )
# h_dimuon_m_3.Fill( dimuons[3][6] )
#
# h_dimuon_m_log_0.Fill( dimuons[0][6] )
# h_dimuon_m_log_1.Fill( dimuons[1][6] )
# h_dimuon_m_log_2.Fill( dimuons[2][6] )
# h_dimuon_m_log_3.Fill( dimuons[3][6] )
#
# #print dimuons[0][6]
# #print float(mass_GammaD_Legend)
# #if dimuons[0][6] > float(mass_GammaD_Legend): print "fake"
# #if dimuons[0][6] <= float(mass_GammaD_Legend): print "real"
# if dimuons[0][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_1.Fill(dimuons[0][6])
# if dimuons[0][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_0.Fill(dimuons[0][6])
# if dimuons[1][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_1.Fill(dimuons[1][6])
# if dimuons[1][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_0.Fill(dimuons[1][6])
# if dimuons[2][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_1.Fill(dimuons[2][6])
# if dimuons[2][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_0.Fill(dimuons[2][6])
# if dimuons[3][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_1.Fill(dimuons[3][6])
# if dimuons[3][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_0.Fill(dimuons[3][6])
#
# if dimuons[0][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_1.Fill(dimuons[0][6])
# if dimuons[0][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_0.Fill(dimuons[0][6])
# if dimuons[1][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_1.Fill(dimuons[1][6])
# if dimuons[1][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_0.Fill(dimuons[1][6])
# if dimuons[2][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_1.Fill(dimuons[2][6])
# if dimuons[2][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_0.Fill(dimuons[2][6])
# if dimuons[3][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_1.Fill(dimuons[3][6])
# if dimuons[3][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_0.Fill(dimuons[3][6])
# dimuons5GeV = []
# for i in range(0, len(dimuons)):
# # select only dimuons with invariant mass less than 5 GeV
# if dimuons[i][6] < 5.0: dimuons5GeV.append( dimuons[i] )
#
# nDimuons5GeV = len(dimuons5GeV)
#
# is2DiMuons = False
# nMuJetsContainMu17 = 0
# m_threshold_Mu17_pT = 17.0
# m_threshold_Mu17_eta = 0.9
# m_randomSeed = 1234
# if nDimuons5GeV == 2:
# # select only dimuons that do NOT share muons
# if dimuons5GeV[0][0] != dimuons5GeV[1][0] and dimuons5GeV[0][0] != dimuons5GeV[1][1] and dimuons5GeV[0][1] != dimuons5GeV[1][1] and dimuons5GeV[0][1] != dimuons5GeV[1][0]:
# isDimuon0ContainMu17 = False
# if ( muons[ dimuons5GeV[0][0] ][6] > m_threshold_Mu17_pT and muons[ dimuons5GeV[0][0] ][7] < m_threshold_Mu17_eta ) or ( muons[ dimuons5GeV[0][1] ][6] > m_threshold_Mu17_pT and muons[ dimuons5GeV[0][1] ][7] < m_threshold_Mu17_eta ):
# isDimuon0ContainMu17 = True
# if ( muons[ dimuons5GeV[1][0] ][6] > m_threshold_Mu17_pT and muons[ dimuons5GeV[1][0] ][7] < m_threshold_Mu17_eta ) or ( muons[ dimuons5GeV[1][1] ][6] > m_threshold_Mu17_pT and muons[ dimuons5GeV[1][1] ][7] < m_threshold_Mu17_eta ):
# isDimuon1ContainMu17 = True
# if isDimuon0ContainMu17 == True and isDimuon1ContainMu17 == False:
# is2DiMuons = True
# muJetC = dimuons5GeV[0]
# muJetF = dimuons5GeV[1]
# elif isDimuon0ContainMu17 == False and isDimuon1ContainMu17 == True:
# is2DiMuons = True
# muJetC = dimuons5GeV[1]
# muJetF = dimuons5GeV[0]
# elif isDimuon0ContainMu17 == True and isDimuon1ContainMu17 == True:
# is2DiMuons = True
# if(ROOT.TRandom3(m_randomSeed).Integer(2) == 0):
# muJetC = dimuons5GeV[0]
# muJetF = dimuons5GeV[1]
# else:
# muJetC = dimuons5GeV[1]
# muJetF = dimuons5GeV[0]
# else:
# is2DiMuons = False
#
# is2DiMuonsMassOK = False
# if is2DiMuons:
# massC = muJetC[6]
# massF = muJetF[6]
# h_m1_vs_m2.Fill(massC, massF)
# h_m1.Fill( massC )
# h_m2.Fill( massF )
# if abs(massC-massF) < (0.13 + 0.065*(massC+massF)/2.0):
# is2DiMuonsMassOK = True
#
# if is2DiMuonsMassOK == True:
# nEventsOK = nEventsOK + 1
# Event-loop summary counters.  NOTE(review): nEventsOK appears to be
# incremented only inside the commented-out dimuon selection above, so it is
# expected to print 0 here -- confirm against the loop head earlier in the file.
print "nEvents = ", nEvents
print "nEventsOK = ", nEventsOK
################################################################################
# Draw histograms
################################################################################
# Missing transverse energy built from the two dark neutralinos (shape-only,
# normalized).  plotOverflow / scaleAxisY / integral are helpers defined
# earlier in this file.
Etmiss_dummy.Draw()
Etmiss.DrawNormalized("same")
scaleAxisY(Etmiss,Etmiss_dummy)
info.Draw()
txtHeader.Draw()
# Every plot in this script is saved in the same three formats.
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_EtMiss.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_EtMiss.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_EtMiss.C")
# Higgs transverse momentum (normalized shape over an empty "dummy" frame
# histogram that fixes the axis ranges).
h_higgs_pT_dummy.Draw()
h_higgs_pT.DrawNormalized("same")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pT.C")
# Higgs longitudinal momentum; plotOverflow is used instead of DrawNormalized
# (presumably to fold the overflow into the last visible bin -- helper defined
# earlier in the file).
h_higgs_pZ_dummy.Draw()
#h_higgs_pZ.DrawNormalized("same")
plotOverflow(h_higgs_pZ)
scaleAxisY(h_higgs_pZ,h_higgs_pZ_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pZ.C")
# Higgs eta plot disabled.  NOTE(review): the third disabled SaveAs repeats
# ".png" where the pattern elsewhere uses ".C" -- fix if ever re-enabled.
#h_higgs_Eta_dummy.Draw()
#h_higgs_Eta.DrawNormalized("same")
#info.Draw()
#txtHeader.Draw()
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Eta.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Eta.png")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Eta.png")
# Higgs azimuthal angle.
h_higgs_Phi_dummy.Draw()
h_higgs_Phi.DrawNormalized("same")
#scaleAxisY(h_higgs_Phi,h_higgs_Phi_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Phi.C")
# Higgs invariant mass, drawn with a logarithmic x axis.
# CLEANUP: the original drew the dummy frame (with the same
# SetNdivisions/SetMoreLogLabels calls) and the normalized histogram a second
# and third time after info/txtHeader -- copy-paste leftovers.  The redundant
# redraws are removed; the same objects remain on the pad.
cnv.SetLogx()
h_higgs_M_dummy.Draw()
h_higgs_M_dummy.SetNdivisions(10)
h_higgs_M_dummy.GetXaxis().SetMoreLogLabels()
h_higgs_M.GetXaxis().SetMoreLogLabels()
h_higgs_M.DrawNormalized("same")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_m.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_m.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_m.C")
# Restore a linear x axis for all subsequent plots.
cnv.SetLogx(0)
# Higgs total momentum, with overflow folded in via plotOverflow.
h_higgs_p_dummy.Draw()
#h_higgs_p.DrawNormalized("same")
plotOverflow(h_higgs_p)
scaleAxisY(h_higgs_p,h_higgs_p_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_p.C")
# ---------------------------------------------------------------------------
# MSSM neutralino (n1) kinematics: pT, pZ, eta, phi, |p|, mass.
# Each plot overlays the leading-pT ("1st") and sub-leading ("2nd") neutralino
# on a dummy frame histogram.  NOTE(review): an identically-styled TLegend is
# rebuilt for every plot -- a candidate for a small helper function.
# ---------------------------------------------------------------------------
h_n1_1_pT_dummy.Draw()
h_n1_1_pT.DrawNormalized("same")
h_n1_2_pT.DrawNormalized("same")
scaleAxisY(h_n1_1_pT, h_n1_1_pT_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_pT,"1st neutralino","L")
legend.AddEntry(h_n1_2_pT,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pT.C")
# Longitudinal momentum (overflow folded into the visible range).
h_n1_1_pZ_dummy.Draw()
plotOverflow(h_n1_1_pZ)
plotOverflow(h_n1_2_pZ)
scaleAxisY(h_n1_1_pZ,h_n1_1_pZ_dummy)
#h_n1_1_pZ.DrawNormalized("same")
#h_n1_2_pZ.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_pZ,"1st neutralino","L")
legend.AddEntry(h_n1_2_pZ,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pZ.C")
# Pseudorapidity.
h_n1_1_Eta_dummy.Draw()
h_n1_1_Eta.DrawNormalized("same")
h_n1_2_Eta.DrawNormalized("same")
scaleAxisY(h_n1_1_Eta,h_n1_1_Eta_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_Eta,"1st neutralino","L")
legend.AddEntry(h_n1_2_Eta,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Eta.C")
# Azimuthal angle.
h_n1_1_Phi_dummy.Draw()
h_n1_1_Phi.DrawNormalized("same")
h_n1_2_Phi.DrawNormalized("same")
scaleAxisY(h_n1_1_Phi,h_n1_1_Phi_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_Phi,"1st neutralino","L")
legend.AddEntry(h_n1_2_Phi,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Phi.C")
# Total momentum (overflow folded into the visible range).
h_n1_1_p_dummy.Draw()
plotOverflow(h_n1_1_p)
plotOverflow(h_n1_2_p)
scaleAxisY(h_n1_1_p,h_n1_1_p_dummy)
#h_n1_1_p.DrawNormalized("same")
#h_n1_2_p.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_p,"1st neutralino","L")
legend.AddEntry(h_n1_2_p,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_p.C")
# Mass: only one histogram is filled/drawn (both neutralinos share the same
# mass), so the two-entry legend stays disabled.
h_n1_1_M_dummy.Draw()
h_n1_1_M.DrawNormalized("same")
#h_n1_2_M.DrawNormalized("same")
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_n1_1_M,"1st neutralino (leading p_{T})","L")
#legend.AddEntry(h_n1_2_M,"2nd neutralino","L")
#legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_M.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_M.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_M.C")
# Dark-neutralino (n_D) transverse momentum, leading vs sub-leading, with
# overflow folded in via plotOverflow.
h_nD_1_pT_dummy.Draw()
#h_nD_1_pT.DrawNormalized("same")
#h_nD_2_pT.DrawNormalized("same")
plotOverflow(h_nD_1_pT)
plotOverflow(h_nD_2_pT)
# BUGFIX: every sibling section rescales against the *dummy* frame histogram;
# this one passed h_nD_1_pT itself, so the drawn frame's y-axis was never
# adjusted.  Pass the dummy, consistent with the rest of the script.
scaleAxisY(h_nD_2_pT,h_nD_1_pT_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_pT,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_pT,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pT.C")
# ---------------------------------------------------------------------------
# Dark-neutralino (n_D) kinematics: pZ, eta, phi, |p|, mass.
# Same overlay pattern as the n1 plots above (leading vs sub-leading on a
# dummy frame, identical legend styling rebuilt per plot).
# ---------------------------------------------------------------------------
h_nD_1_pZ_dummy.Draw()
h_nD_1_pZ.DrawNormalized("same")
h_nD_2_pZ.DrawNormalized("same")
scaleAxisY(h_nD_2_pZ,h_nD_1_pZ_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_pZ,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_pZ,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pZ.C")
# Pseudorapidity.
h_nD_1_Eta_dummy.Draw()
h_nD_1_Eta.DrawNormalized("same")
h_nD_2_Eta.DrawNormalized("same")
scaleAxisY(h_nD_1_Eta,h_nD_1_Eta_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_Eta,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_Eta,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Eta.C")
# Azimuthal angle.
h_nD_1_Phi_dummy.Draw()
h_nD_1_Phi.DrawNormalized("same")
h_nD_2_Phi.DrawNormalized("same")
scaleAxisY(h_nD_1_Phi,h_nD_1_Phi_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_Phi,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_Phi,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Phi.C")
# Total momentum.
h_nD_1_p_dummy.Draw()
h_nD_1_p.DrawNormalized("same")
h_nD_2_p.DrawNormalized("same")
scaleAxisY(h_nD_2_p,h_nD_1_p_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_p,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_p,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_p.C")
# Mass: single histogram, legend disabled (same convention as the n1 mass plot).
h_nD_1_M_dummy.Draw()
h_nD_1_M.DrawNormalized("same")
#h_nD_2_M.DrawNormalized("same")
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_nD_1_M,"1st n_{D} (leading p_{T})","L")
#legend.AddEntry(h_nD_2_M,"2nd n_{D}","L")
#legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_M.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_M.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_M.C")
# ---------------------------------------------------------------------------
# Dark-photon proper lifetime (c*tau) and lab-frame decay lengths L, L_xy,
# L_z.  Histograms are normalized by hand (Scale by 1/integral, where
# `integral` is a helper defined earlier) rather than with DrawNormalized, so
# the overlaid analytic curve is on the same scale.
# ---------------------------------------------------------------------------
h_gammaD_cT_dummy.Draw()
normConstant = integral(h_gammaD_cT)
#print normConstant
h_gammaD_cT.Scale(1/normConstant)
h_gammaD_cT.Draw("same")
scaleAxisYcT(h_gammaD_cT,h_gammaD_cT_dummy)
# Expected truncated-exponential proper-lifetime distribution, normalized on
# [cTlow, cTlim]; overlaid as a cross-check of the generator lifetime.
funct = ROOT.TF1("funct","exp(-x/"+ lifetime_GammaD_Legend +")/("+ lifetime_GammaD_Legend + "*(1 - exp(-" + str(cTlim) + "/" + lifetime_GammaD_Legend + ")))",cTlow,cTlim)
funct.SetNpx(10000)
funct.Draw("same")
h_gammaD_cT.SetTitleOffset(1.5, "Y")
h_gammaD_cT.SetXTitle("c#tau of #gamma_{D} [mm]")
h_gammaD_cT.SetYTitle("Normalized Fraction of events")
h_gammaD_cT.SetTitleSize(0.05,"Y")
info.Draw()
txtHeader.Draw()
# Legend showing the analytic formula drawn above.
eqn = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
eqn.SetFillColor(ROOT.kWhite)
eqn.SetFillStyle(0)
eqn.SetBorderSize(0)
eqn.SetTextFont(42)
eqn.SetTextSize(0.02777778)
eqn.SetMargin(0.13)
eqn.AddEntry(funct, "#frac{e^{-x/"+ lifetime_GammaD_Legend +"}}{"+ lifetime_GammaD_Legend + " (1 - e^{-" + str(cTlim) + "/" + lifetime_GammaD_Legend + "})}", "L")
eqn.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_cT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_cT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_cT.C")
# Lab-frame decay length L (boosted by the full Lorentz factor in the loop).
h_gammaD_cT_lab_dummy.Draw()
normConstant = integral(h_gammaD_cT_lab)
h_gammaD_cT_lab.Scale(1/normConstant)
h_gammaD_cT_lab.Draw("same")
scaleAxisYcT(h_gammaD_cT_lab,h_gammaD_cT_lab_dummy)
#h_gammaD_cT_lab.DrawNormalized("same")
#myfit = ROOT.TF1("myfit", "[0]*exp(-x/[1])", 0, 10)
#myfit.SetParName(0,"C")
#myfit.SetParName(1,"L")
#myfit.SetParameter(0,1)
#myfit.SetParameter(1,1)
#h_gammaD_cT_lab.Fit("myfit").Draw("same")
h_gammaD_cT_lab.SetTitleOffset(1.5, "Y")
h_gammaD_cT_lab.SetXTitle("L of #gamma_{D} [mm]")
h_gammaD_cT_lab.SetYTitle("Events")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L.C")
# Transverse decay length L_xy.
h_gammaD_cT_XY_lab_dummy.Draw()
normConstant = integral(h_gammaD_cT_XY_lab)
h_gammaD_cT_XY_lab.Scale(1/normConstant)
h_gammaD_cT_XY_lab.Draw("same")
scaleAxisYcT(h_gammaD_cT_XY_lab,h_gammaD_cT_XY_lab_dummy)
#h_gammaD_cT_XY_lab.DrawNormalized("same")
#myfit = ROOT.TF1("myfit", "[0]*exp(-x/[1])", 0, 10)
#myfit.SetParName(0,"C")
#myfit.SetParName(1,"L_{xy}")
#myfit.SetParameter(0,1)
#myfit.SetParameter(1,1)
#h_gammaD_cT_XY_lab.Fit("myfit").Draw("same")
h_gammaD_cT_XY_lab.SetTitleOffset(1.5, "Y")
h_gammaD_cT_XY_lab.SetXTitle("L_{xy} of #gamma_{D} [mm]")
h_gammaD_cT_XY_lab.SetYTitle("Events")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_XY.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_XY.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_XY.C")
# Longitudinal decay length L_z.
h_gammaD_cT_Z_lab_dummy.Draw()
normConstant = integral(h_gammaD_cT_Z_lab)
h_gammaD_cT_Z_lab.Scale(1/normConstant)
h_gammaD_cT_Z_lab.Draw("same")
scaleAxisYcT(h_gammaD_cT_Z_lab,h_gammaD_cT_Z_lab_dummy)
#h_gammaD_cT_Z_lab.DrawNormalized("same")
#myfit = ROOT.TF1("myfit", "[0]*exp(-x/[1])", 0, 10)
#myfit.SetParName(0,"C")
#myfit.SetParName(1,"L_{z}")
#myfit.SetParameter(0,1)
#myfit.SetParameter(1,1)
#h_gammaD_cT_Z_lab.Fit("myfit").Draw("same")
h_gammaD_cT_Z_lab.SetTitleOffset(1.5, "Y")
h_gammaD_cT_Z_lab.SetXTitle("L_{z} of #gamma_{D} [mm]")
h_gammaD_cT_Z_lab.SetYTitle("Events")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_Z.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_Z.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_Z.C")
# ---------------------------------------------------------------------------
# pT-sorted dark-photon lifetimes: leading ("1st") vs sub-leading ("2nd")
# dark photon, each normalized independently by its own integral, for c*tau
# and the lab-frame decay lengths.
# ---------------------------------------------------------------------------
h_gammaD_1_cT_dummy.Draw()
normConstant = integral(h_gammaD_1_cT)
h_gammaD_1_cT.Scale(1/normConstant)
h_gammaD_1_cT.Draw("same")
normConstant2 = integral(h_gammaD_2_cT)
h_gammaD_2_cT.Scale(1/normConstant2)
h_gammaD_2_cT.Draw("same")
scaleAxisYcT(h_gammaD_2_cT,h_gammaD_1_cT_dummy)
#h_gammaD_1_cT.DrawNormalized("same")
#h_gammaD_2_cT.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_cT,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_cT,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT.C")
# Lab-frame decay length L, sorted.
h_gammaD_1_cT_lab_dummy.Draw()
normConstant = integral(h_gammaD_1_cT_lab)
h_gammaD_1_cT_lab.Scale(1/normConstant)
h_gammaD_1_cT_lab.Draw("same")
normConstant2 = integral(h_gammaD_2_cT_lab)
h_gammaD_2_cT_lab.Scale(1/normConstant2)
h_gammaD_2_cT_lab.Draw("same")
scaleAxisYcT(h_gammaD_2_cT_lab,h_gammaD_1_cT_lab_dummy)
#h_gammaD_1_cT_lab.DrawNormalized("same")
#h_gammaD_2_cT_lab.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_cT_lab,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_cT_lab,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_lab.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_lab.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_lab.C")
# Transverse decay length L_xy, sorted.
h_gammaD_1_cT_XY_lab_dummy.Draw()
normConstant = integral(h_gammaD_1_cT_XY_lab)
h_gammaD_1_cT_XY_lab.Scale(1/normConstant)
h_gammaD_1_cT_XY_lab.Draw("same")
normConstant2 = integral(h_gammaD_2_cT_XY_lab)
h_gammaD_2_cT_XY_lab.Scale(1/normConstant2)
h_gammaD_2_cT_XY_lab.Draw("same")
scaleAxisYcT(h_gammaD_2_cT_XY_lab,h_gammaD_1_cT_XY_lab_dummy)
#h_gammaD_1_cT_XY_lab.DrawNormalized("same")
#h_gammaD_2_cT_XY_lab.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_cT_XY_lab,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_cT_XY_lab,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_XY_lab.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_XY_lab.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_XY_lab.C")
# Longitudinal decay length L_z, sorted.
h_gammaD_1_cT_Z_lab_dummy.Draw()
normConstant = integral(h_gammaD_1_cT_Z_lab)
h_gammaD_1_cT_Z_lab.Scale(1/normConstant)
h_gammaD_1_cT_Z_lab.Draw("same")
normConstant2 = integral(h_gammaD_2_cT_Z_lab)
h_gammaD_2_cT_Z_lab.Scale(1/normConstant2)
h_gammaD_2_cT_Z_lab.Draw("same")
scaleAxisYcT(h_gammaD_2_cT_Z_lab,h_gammaD_1_cT_Z_lab_dummy)
#h_gammaD_1_cT_Z_lab.DrawNormalized("same")
#h_gammaD_2_cT_Z_lab.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_cT_Z_lab,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_cT_Z_lab,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_Z_lab.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_Z_lab.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_Z_lab.C")
# --- Dark photon transverse momentum (pT), leading vs sub-leading ---
h_gammaD_1_pT_dummy.Draw()
h_gammaD_1_pT.DrawNormalized("same")
h_gammaD_2_pT.DrawNormalized("same")
scaleAxisY(h_gammaD_2_pT,h_gammaD_1_pT_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_pT,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_pT,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pT.C")
# --- Dark photon longitudinal momentum (pZ) ---
# The commented htmp/htmp2 code below was an experiment at folding the
# overflow bin into the last visible bin; plotOverflow (defined earlier)
# supersedes it but is also disabled here.
h_gammaD_1_pZ_dummy.Draw()
#plotOverflow(h_gammaD_1_pZ)
#plotOverflow(h_gammaD_2_pZ)
h_gammaD_1_pZ.DrawNormalized("same")
h_gammaD_2_pZ.DrawNormalized("same")
scaleAxisY(h_gammaD_2_pZ,h_gammaD_1_pZ_dummy)
#htmp = ROOT.TH1F(h_gammaD_1_pZ.GetName(),h_gammaD_1_pZ.GetTitle(), h_gammaD_1_pZ.GetNbinsX()+1, h_gammaD_1_pZ.GetBinLowEdge(1), h_gammaD_1_pZ.GetBinLowEdge(h_gammaD_1_pZ.GetNbinsX()+1)+h_gammaD_1_pZ.GetBinWidth(h_gammaD_1_pZ.GetNbinsX()+1))
#for i in range(1, h_gammaD_1_pZ.GetNbinsX()+1 ):
#  htmp.Fill(htmp.GetBinCenter(i), h_gammaD_1_pZ.GetBinContent(i))
#htmp.Fill(h_gammaD_1_pZ.GetNbinsX()-1, h_gammaD_1_pZ.GetBinContent(0))
#htmp.SetEntries(h_gammaD_1_pZ.GetEntries())
#htmp.SetLineColor(ROOT.kRed)
#htmp.DrawNormalized("same")
#htmp2 = ROOT.TH1F(h_gammaD_2_pZ.GetName(), h_gammaD_2_pZ.GetTitle(), h_gammaD_2_pZ.GetNbinsX()+1, h_gammaD_2_pZ.GetBinLowEdge(1), h_gammaD_2_pZ.GetBinLowEdge(h_gammaD_2_pZ.GetNbinsX()+1)+h_gammaD_2_pZ.GetBinWidth(h_gammaD_2_pZ.GetNbinsX()+1))
#for i in range(1, h_gammaD_2_pZ.GetNbinsX()+1 ):
#  htmp2.Fill(htmp2.GetBinCenter(i), h_gammaD_2_pZ.GetBinContent(i))
#htmp2.Fill(h_gammaD_2_pZ.GetNbinsX()-1, h_gammaD_2_pZ.GetBinContent(0))
#htmp2.SetEntries(h_gammaD_2_pZ.GetEntries())
#htmp2.SetLineColor(ROOT.kBlue)
#htmp2.DrawNormalized("same")
#h_gammaD_1_pZ.DrawNormalized("same")
#h_gammaD_2_pZ.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_pZ,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_pZ,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pZ.C")
# --- Dark photon pseudorapidity (eta) ---
h_gammaD_1_Eta_dummy.Draw()
h_gammaD_1_Eta.DrawNormalized("same")
h_gammaD_2_Eta.DrawNormalized("same")
scaleAxisY(h_gammaD_1_Eta,h_gammaD_1_Eta_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_Eta,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_Eta,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Eta.C")
# --- Dark photon azimuthal angle (phi) ---
h_gammaD_1_Phi_dummy.Draw()
h_gammaD_1_Phi.DrawNormalized("same")
h_gammaD_2_Phi.DrawNormalized("same")
scaleAxisY(h_gammaD_1_Phi,h_gammaD_1_Phi_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_Phi,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_Phi,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Phi.C")
# --- Dark photon total momentum (p); overflow folded into the last bin ---
h_gammaD_1_p_dummy.Draw()
plotOverflow(h_gammaD_1_p)
plotOverflow(h_gammaD_2_p)
scaleAxisY(h_gammaD_2_p,h_gammaD_1_p_dummy)
#h_gammaD_1_p.DrawNormalized("same")
#h_gammaD_2_p.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_p,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_p,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_p.C")
# --- Dark photon invariant mass, drawn with a logarithmic x-axis ---
# Only the leading dark photon is drawn; the second-photon histogram and the
# legend are left disabled (both photons share the same generated mass).
h_gammaD_1_M_dummy.Draw()
cnv.SetLogx()
h_gammaD_1_M.DrawNormalized("same")
#h_gammaD_2_M.DrawNormalized("same")
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_gammaD_1_M,"1st dark photon (leading p_{T})","L")
#legend.AddEntry(h_gammaD_2_M,"2nd dark photon","L")
#legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_M.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_M.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_M.C")
# Restore linear x-axis for the following plots.
cnv.SetLogx(0)
# === Per-muon kinematics: the four muons are ordered by decreasing pT ===
# --- Muon transverse momentum ---
h_muon_pT_dummy.Draw()
h_muon_pT_0.DrawNormalized("same")
h_muon_pT_1.DrawNormalized("same")
h_muon_pT_2.DrawNormalized("same")
h_muon_pT_3.DrawNormalized("same")
scaleAxisY(h_muon_pT_3,h_muon_pT_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_pT_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_pT_1,"2nd muon","L")
legend.AddEntry(h_muon_pT_2,"3rd muon","L")
legend.AddEntry(h_muon_pT_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pT.C")
# --- Muon azimuthal angle ---
h_muon_phi_dummy.Draw()
h_muon_phi_0.DrawNormalized("same")
h_muon_phi_1.DrawNormalized("same")
h_muon_phi_2.DrawNormalized("same")
h_muon_phi_3.DrawNormalized("same")
scaleAxisY(h_muon_phi_0,h_muon_phi_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_phi_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_phi_1,"2nd muon","L")
legend.AddEntry(h_muon_phi_2,"3rd muon","L")
legend.AddEntry(h_muon_phi_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_phi.C")
# --- Muon longitudinal momentum ---
h_muon_pZ_dummy.Draw()
h_muon_pZ_0.DrawNormalized("same")
h_muon_pZ_1.DrawNormalized("same")
h_muon_pZ_2.DrawNormalized("same")
h_muon_pZ_3.DrawNormalized("same")
scaleAxisY(h_muon_pZ_3,h_muon_pZ_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_pZ_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_pZ_1,"2nd muon","L")
legend.AddEntry(h_muon_pZ_2,"3rd muon","L")
legend.AddEntry(h_muon_pZ_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pZ.C")
# --- Muon total momentum ---
h_muon_p_dummy.Draw()
h_muon_p_0.DrawNormalized("same")
h_muon_p_1.DrawNormalized("same")
h_muon_p_2.DrawNormalized("same")
h_muon_p_3.DrawNormalized("same")
scaleAxisY(h_muon_p_3,h_muon_p_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_p_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_p_1,"2nd muon","L")
legend.AddEntry(h_muon_p_2,"3rd muon","L")
legend.AddEntry(h_muon_p_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_p.C")
# --- Muon pseudorapidity ---
h_muon_eta_dummy.Draw()
h_muon_eta_0.DrawNormalized("same")
h_muon_eta_1.DrawNormalized("same")
h_muon_eta_2.DrawNormalized("same")
h_muon_eta_3.DrawNormalized("same")
scaleAxisY(h_muon_eta_0,h_muon_eta_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_eta_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_eta_1,"2nd muon","L")
legend.AddEntry(h_muon_eta_2,"3rd muon","L")
legend.AddEntry(h_muon_eta_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_eta.C")
#h_dimuon_m_dummy.Draw()
#h_dimuon_m_0.DrawNormalized("same")
#h_dimuon_m_1.DrawNormalized("same")
#h_dimuon_m_2.DrawNormalized("same")
#h_dimuon_m_3.DrawNormalized("same")
#
#legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_dimuon_m_0,"1st dimuon (leading m_{#mu#mu})","L")
#legend.AddEntry(h_dimuon_m_1,"2nd dimuon","L")
#legend.AddEntry(h_dimuon_m_2,"3rd dimuon","L")
#legend.AddEntry(h_dimuon_m_3,"4th dimuon","L")
#legend.Draw()
#info.Draw()
#txtHeader.Draw()
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.png")
## convert -define.pdf:use-cropbox=true -density 300 CSxBR_vs_mh.pdf -resize 900x900 CSxBR_vs_mh.png
#
#h_dimuon_m_log_dummy.Draw()
#cnv.SetLogy()
#h_dimuon_m_log_0.DrawNormalized("same")
#h_dimuon_m_log_1.DrawNormalized("same")
#h_dimuon_m_log_2.DrawNormalized("same")
#h_dimuon_m_log_3.DrawNormalized("same")
#
#legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_dimuon_m_log_0,"1st dimuon (leading m_{#mu#mu})","L")
#legend.AddEntry(h_dimuon_m_log_1,"2nd dimuon","L")
#legend.AddEntry(h_dimuon_m_log_2,"3rd dimuon","L")
#legend.AddEntry(h_dimuon_m_log_3,"4th dimuon","L")
#legend.Draw()
#info.Draw()
#txtHeader.Draw()
#
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_log.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_log.png")
#cnv.SetLogy(0)
#
#h_dimuon_m_real_fake_dummy.Draw()
#h_dimuon_m_real_fake_0.DrawNormalized("same")
#h_dimuon_m_real_fake_1.DrawNormalized("same")
#
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_dimuon_m_real_fake_0,"Real dimuons","L")
#legend.AddEntry(h_dimuon_m_real_fake_1,"Fake dimuons","L")
#legend.Draw()
#info.Draw()
#txtHeader.Draw()
#
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_real_fake.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_real_fake.png")
#
#h_dimuon_m_real_fake_log_dummy.Draw()
#cnv.SetLogy()
#h_dimuon_m_real_fake_log_0.DrawNormalized("same")
#h_dimuon_m_real_fake_log_1.DrawNormalized("same")
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_dimuon_m_real_fake_log_0,"Real dimuons","L")
#legend.AddEntry(h_dimuon_m_real_fake_log_1,"Fake dimuons","L")
#legend.Draw()
#info.Draw()
#txtHeader.Draw()
#
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_real_fake_log.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_real_fake_log.png")
# Ensure linear y-axis after the (disabled) log-scale section above.
cnv.SetLogy(0)
# --- 2D correlation of the two dimuon masses ---
h_m1_vs_m2.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m1_vs_m2.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m1_vs_m2.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m1_vs_m2.C")
# --- 1D dimuon mass spectra, log-x ---
cnv.SetLogx()
h_m2.Draw()
h_m1.Draw("same")
info.Draw()
# Fix: txtHeader.Draw() was called twice in a row here; the duplicate call was
# redundant (it re-draws the same header on the same canvas) and is removed.
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.C")
cnv.SetLogx(0)
# --- Fake (mispaired) dimuon mass, linear axes ---
h_dimuon_m_fake_dummy.Draw()
h_dimuon_m_fake_0.DrawNormalized("same")
scaleAxisY(h_dimuon_m_fake_0,h_dimuon_m_fake_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake.C")
# --- Fake dimuon mass, log-log axes ---
h_dimuon_m_fake_log_dummy.Draw()
cnv.SetLogy()
cnv.SetLogx()
h_dimuon_m_fake_log_0.DrawNormalized("same")
#scaleAxisY(h_dimuon_m_fake_log_0,h_dimuon_m_fake_log_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake_log.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake_log.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake_log.C")
# Restore linear axes for the plots that follow.
cnv.SetLogy(0)
cnv.SetLogx(0)
# === Dimuon kinematics: pairs ordered by decreasing pT ===
# --- Dimuon transverse momentum ---
h_dimuon_1_pT_dummy.Draw()
h_dimuon_1_pT.DrawNormalized("same")
h_dimuon_2_pT.DrawNormalized("same")
scaleAxisY(h_dimuon_2_pT,h_dimuon_1_pT_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_pT,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_pT,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pT.C")
# --- Dimuon longitudinal momentum ---
h_dimuon_1_pZ_dummy.Draw()
#plotOverflow(h_dimuon_1_pZ)
#plotOverflow(h_dimuon_2_pZ)
h_dimuon_1_pZ.DrawNormalized("same")
h_dimuon_2_pZ.DrawNormalized("same")
scaleAxisY(h_dimuon_2_pZ,h_dimuon_1_pZ_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_pZ,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_pZ,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pZ.C")
# --- Dimuon pseudorapidity ---
h_dimuon_1_Eta_dummy.Draw()
h_dimuon_1_Eta.DrawNormalized("same")
h_dimuon_2_Eta.DrawNormalized("same")
scaleAxisY(h_dimuon_1_Eta,h_dimuon_1_Eta_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_Eta,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_Eta,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Eta.C")
# --- Dimuon azimuthal angle ---
h_dimuon_1_Phi_dummy.Draw()
h_dimuon_1_Phi.DrawNormalized("same")
h_dimuon_2_Phi.DrawNormalized("same")
scaleAxisY(h_dimuon_1_Phi,h_dimuon_1_Phi_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_Phi,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_Phi,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Phi.C")
# --- Dimuon total momentum; overflow folded into the last bin ---
h_dimuon_1_p_dummy.Draw()
plotOverflow(h_dimuon_1_p)
plotOverflow(h_dimuon_2_p)
scaleAxisY(h_dimuon_2_p,h_dimuon_1_p_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_p,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_p,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_p.C")
# Flush histograms to the output ROOT file and close the input.
# NOTE(review): BAM is presumably the output TFile and f the input file handle,
# both opened earlier in this script -- confirm against the script header.
BAM.Write()
print "Made it to the end and closes"
f.close()
| [
"[email protected]"
] | |
3523fe1ae052b3f169f7bc74db4e83be9b2377c2 | 40afc1f3790099d2d5270503d101f30c71a89f07 | /usersys/views/user.py | d4c9af3172aaa675d041cfa02bcb920867dd7649 | [] | no_license | fhydralisk/reviewing | a3d31af1e8fe8caf2e831b35816d638ac0cadcce | 7a27f278f85f9fdbcc805b0290f6bbdbb7147609 | refs/heads/master | 2020-05-14T23:27:37.229343 | 2019-05-07T12:28:21 | 2019-05-07T12:28:21 | 181,997,119 | 0 | 2 | null | 2019-05-07T07:38:14 | 2019-04-18T01:49:53 | Python | UTF-8 | Python | false | false | 431 | py | from base.views import WLAPIGenericView
from ..serializers import user as user_serializers
from ..funcs import user as user_funcs
class UserView(WLAPIGenericView):
    """
    REST endpoint for a user's profile.

    Dispatch and (de)serialization are handled by WLAPIGenericView using the
    class-level tables below, which map each HTTP verb to its request
    serializer, response serializer and business-logic class.
    NOTE(review): mapping semantics inferred from naming conventions --
    confirm against WLAPIGenericView.
    """
    # GET reads the profile, PATCH partially updates it; no PUT/POST/DELETE.
    http_method_names = ['get', 'patch', 'options']
    # Validates/deserializes the incoming PATCH payload.
    API_SERIALIZER = {
        'patch': user_serializers.UserPartialUpdateSerializer
    }
    # Serializes the GET response body.
    RESULT_SERIALIZER = {
        'get': user_serializers.UserDetailSerializer
    }
    # Business-logic implementation backing this view.
    FUNC_CLASS = user_funcs.UserFunc
| [
"[email protected]"
] | |
62ab32f13bfb48de1118f28c062ed0d2f5702325 | 6e5c83baa19e09bcc59300d764ce936f8cbe6b5b | /pybtex/style/names/plain.py | 62c0c2ca311b0e086a1a078c4410d14d84d02f38 | [
"MIT"
] | permissive | rybesh/pybtex | 84e10b12f6c9ade0de2af638bfc23945109eff6d | 18e0b5336f07ebc5dc97aa899362fb292ea7bb5a | refs/heads/master | 2016-08-07T20:15:26.865726 | 2011-03-18T18:03:48 | 2011-03-18T18:03:48 | 1,246,178 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,441 | py | # Copyright (c) 2010, 2011 Andrey Golovizin
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from pybtex.style.template import join
from pybtex.style.names import BaseNameStyle, name_part
class NameStyle(BaseNameStyle):
    name = 'plain'

    def format(self, person, abbr=False):
        r"""
        Format a person's name the way BibTeX's {ff~}{vv~}{ll}{, jj}
        template would.

        >>> from pybtex.core import Person
        >>> name = Person(string=r"Charles Louis Xavier Joseph de la Vall{\'e}e Poussin")
        >>> plain = NameStyle().format
        >>> print plain(name).format().plaintext()
        Charles Louis Xavier<nbsp>Joseph de<nbsp>la Vall{\'e}e<nbsp>Poussin
        >>> print plain(name, abbr=True).format().plaintext()
        C.<nbsp>L. X.<nbsp>J. de<nbsp>la Vall{\'e}e<nbsp>Poussin
        >>> name = Person(first='First', last='Last', middle='Middle')
        >>> print plain(name).format().plaintext()
        First<nbsp>Middle Last
        >>> print plain(name, abbr=True).format().plaintext()
        F.<nbsp>M. Last
        >>> print plain(Person('de Last, Jr., First Middle')).format().plaintext()
        First<nbsp>Middle de<nbsp>Last, Jr.
        """
        # Build the four BibTeX name parts separately, then join them.
        # tie=True keeps a part attached to its neighbour with a ~ (non-breaking
        # space); the lineage ("Jr.") is prefixed with a comma when present.
        given = name_part(tie=True) [person.first(abbr) + person.middle(abbr)]
        von = name_part(tie=True) [person.prelast()]
        last = name_part [person.last()]
        junior = name_part(before=', ') [person.lineage()]
        return join [given, von, last, junior]
"[email protected]"
] | |
712c8911fb30a81f68341c8d02607fc01373169c | bc2effb57e82128b81371fb03547689255d5ef15 | /백준/그래프/13549(숨바꼭질 3).py | 3e27f94ac43b4efa403bf096775a59d3e8e538cd | [] | no_license | CharmingCheol/python-algorithm | 393fa3a8921f76d25e0d3f02402eae529cc283ad | 61c8cddb72ab3b1fba84171e03f3a36f8c672648 | refs/heads/master | 2023-03-01T11:00:52.801945 | 2021-01-31T13:38:29 | 2021-01-31T13:38:29 | 229,561,513 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 738 | py | import sys
from collections import deque
MAX_SIZE = 100001
start, end = map(int, sys.stdin.readline().split())
board = [float("inf")] * MAX_SIZE
board[start] = 0
queue = deque()
queue.append((start, 0))
while queue:
now, value = queue.popleft()
if now == end:
print(board[now])
break
if value != board[now]: continue
if 0 <= now - 1 and value + 1 < board[now - 1]:
board[now - 1] = value + 1
queue.append((now - 1, value + 1))
if now + 1 < MAX_SIZE and value + 1 < board[now + 1]:
board[now + 1] = value + 1
queue.append((now + 1, value + 1))
if now * 2 < MAX_SIZE and value < board[now * 2]:
board[now * 2] = value
queue.append((now * 2, value))
| [
"[email protected]"
] | |
86e497f7d8b7f8e601d5bdf3d3d634b51fbc04bf | e82b761f53d6a3ae023ee65a219eea38e66946a0 | /All_In_One/addons/hair_tool/curves_resample.py | bbf794543f831be09e4c96a6a4ed9485f74a8093 | [] | no_license | 2434325680/Learnbgame | f3a050c28df588cbb3b14e1067a58221252e2e40 | 7b796d30dfd22b7706a93e4419ed913d18d29a44 | refs/heads/master | 2023-08-22T23:59:55.711050 | 2021-10-17T07:26:07 | 2021-10-17T07:26:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,061 | py | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Copyright (C) 2017 JOSECONSCO
# Created by JOSECONSCO
import bpy
import math
import numpy as np
from bpy.props import EnumProperty, FloatProperty, BoolProperty, IntProperty, StringProperty
from .resample2d import interpol_Catmull_Rom, get_strand_proportions
class HT_OT_CurvesResample(bpy.types.Operator):
    """Resample every (selected) spline of the active curve object to a new
    point count, interpolating positions with a Catmull-Rom spline and
    radius/tilt linearly along each strand."""
    bl_label = "Curve resample"
    bl_idname = "object.curve_resample"
    bl_description = "Change ammount of points on curve"
    bl_options = {"REGISTER", "UNDO"}

    # Spline type of the rebuilt strands; defaulted to the input type in invoke().
    hairType: bpy.props.EnumProperty(name="Output Curve Type", default="NURBS",
                                     items=(("BEZIER", "Bezier", ""),
                                            ("NURBS", "Nurbs", ""),
                                            ("POLY", "Poly", "")))
    # bezierRes: IntProperty(name="Bezier resolution", default=3, min=1, max=12)
    # Target number of points per strand.
    t_in_y: IntProperty(name="Strand Segments", default=8, min=3, max=20)
    uniformPointSpacing: BoolProperty(name="Uniform spacing", description="Distribute stand points with uniform spacing", default=False)
    equalPointCount: BoolProperty(name="Equal point count", description="Give all cures same points count \n"
                                  "If disabled shorter curves will have less points", default=False)
    onlySelection: BoolProperty(name="Only Selected", description="Affect only selected points", default=False)

    def invoke(self, context, event):
        # Seed the operator properties from the active curve before execute():
        # edit mode implies working on the selection only.
        particleObj = context.active_object
        if particleObj.mode == 'EDIT':
            self.onlySelection = True
        elif particleObj.mode == 'OBJECT':
            self.onlySelection = False
        Curve = context.active_object
        if not Curve.type == 'CURVE':
            self.report({'INFO'}, 'Use operator on curve type object')
            return {"CANCELLED"}
        # Remember the input spline type so execute() can mirror it (and the
        # NURBS order, which only exists on NURBS input).
        self.input_spline_type = Curve.data.splines[0].type
        self.hairType = self.input_spline_type  # hair type - output spline
        if self.input_spline_type == 'NURBS':
            self.nurbs_order = Curve.data.splines[0].order_u
        if len(Curve.data.splines) > 0:  # do get initnial value for resampling t
            polyline = Curve.data.splines[0]  # take first spline len for resampling
            if polyline.type == 'NURBS' or polyline.type == 'POLY':
                self.t_in_y = len(polyline.points)
            else:
                self.t_in_y = len(polyline.bezier_points)
        self.bezierRes = Curve.data.resolution_u
        return self.execute(context)

    def execute(self, context):
        # NOTE(review): execute() relies on input_spline_type/bezierRes set in
        # invoke(); Blender calls invoke() first for interactive use -- confirm
        # for scripted calls.
        curveObj = context.active_object
        if curveObj.type != 'CURVE':
            self.report({'INFO'}, 'Works only on curves')
            return {"CANCELLED"}
        pointsList = []
        pointsRadius = []
        pointsTilt = []
        selectedSplines = []
        # Collect the splines to resample: either those with any selected
        # point, or all of them (also the fallback when nothing is selected).
        if self.onlySelection:
            for polyline in curveObj.data.splines:
                if polyline.type == 'NURBS' or polyline.type == 'POLY':
                    if any(point.select == True for point in polyline.points):
                        selectedSplines.append(polyline)
                else:
                    if any(point.select_control_point == True for point in polyline.bezier_points):
                        selectedSplines.append(polyline)
            if not selectedSplines:
                selectedSplines = curveObj.data.splines
        else:
            selectedSplines = curveObj.data.splines
        # Snapshot positions, radii and tilts of every strand.
        for polyline in selectedSplines:  # for strand point
            if polyline.type == 'NURBS' or polyline.type == 'POLY':
                points = polyline.points
            else:
                points = polyline.bezier_points
            if len(points) > 1:  # skip single points
                pointsList.append([point.co.to_3d() for point in points])
                pointsRadius.append([point.radius for point in points])
                pointsTilt.append([point.tilt for point in points])
        backup_mat_indices = [spline.material_index for spline in selectedSplines]
        interpolRad = []
        interpolTilt = []
        # Positions are resampled with a Catmull-Rom interpolation helper.
        splinePointsList = interpol_Catmull_Rom(pointsList, self.t_in_y, uniform_spacing=self.uniformPointSpacing, same_point_count=self.equalPointCount)
        # Radius/tilt are interpolated linearly to match the new point counts.
        if self.equalPointCount:  # all strands get exactly t_in_y points
            t_ins_y = [i / (self.t_in_y - 1) for i in range(self.t_in_y)]
            for radii, tilts in zip(pointsRadius, pointsTilt):  # per strand
                t_rad = [i / (len(radii) - 1) for i in range(len(radii))]
                interpolRad.append(np.interp(t_ins_y, t_rad, radii))  # first arg len() = out len
                interpolTilt.append(np.interp(t_ins_y, t_rad, tilts))  # first arg len() = out len
        else:  # shorter strands get proportionally fewer points
            lens = [len(x) for x in splinePointsList]
            for radii, tilts, strandLen in zip(pointsRadius, pointsTilt, lens):  # per strand
                t_ins_Normalized = [i / (strandLen - 1) for i in range(strandLen)]
                t_rad = [[i / (len(radii) - 1) for i in range(len(radii))]]
                interpolRad.append(np.interp(t_ins_Normalized, t_rad[0], radii))  # first arg len() = out len
                interpolTilt.append(np.interp(t_ins_Normalized, t_rad[0], tilts))  # first arg len() = out len
        curveData = curveObj.data
        # spline_type =
        # Drop the old splines before rebuilding them.
        if self.onlySelection:
            for spline in selectedSplines:
                curveData.splines.remove(spline)
        else:
            curveData.splines.clear()
        newSplines = []
        for k, splinePoints in enumerate(splinePointsList):  # for each strand/ring
            curveLenght = len(splinePoints)
            polyline = curveData.splines.new(self.hairType)
            newSplines.append(polyline)
            if self.hairType == 'BEZIER':
                polyline.bezier_points.add(curveLenght - 1)
            elif self.hairType == 'POLY' or self.hairType == 'NURBS':
                polyline.points.add(curveLenght - 1)
            if self.hairType == 'NURBS':
                polyline.order_u = self.nurbs_order if self.input_spline_type == 'NURBS' else 3
                polyline.use_endpoint_u = True
            # Non-bezier points are 4D (x, y, z, w); pre-fill w with 1.
            np_splinePointsOnes = np.ones((len(splinePoints), 4))  # 4 coord x,y,z ,1
            np_splinePointsOnes[:, :3] = splinePoints
            if self.hairType == 'BEZIER':
                polyline.bezier_points.foreach_set('co', np_splinePointsOnes[:, :3])
                polyline.bezier_points.foreach_set('radius', interpolRad[k])
                polyline.bezier_points.foreach_set('tilt', interpolTilt[k])
                polyline.bezier_points.foreach_set('handle_left_type', 'AUTO')
                polyline.bezier_points.foreach_set('handle_right_type', 'AUTO')
            else:
                polyline.points.foreach_set('co', np_splinePointsOnes.ravel())
                polyline.points.foreach_set('radius', interpolRad[k])
                polyline.points.foreach_set('tilt', interpolTilt[k])
        curveData.resolution_u = self.bezierRes
        # bpy.ops.object.curve_uv_refresh()
        # Restore the material assignment of the rebuilt splines.
        for backup_mat, newSpline in zip(backup_mat_indices, newSplines):
            newSpline.material_index = backup_mat
        return {"FINISHED"}
| [
"[email protected]"
] | |
7e33879f634aa7e8d75988cebf28a1a0a95922cf | 9918208c80a3c396d8a1e13783d501d60dbc2050 | /digitalearthau/index.py | 184f71b63443c944423a74ab43f21a32af6c40c5 | [] | no_license | benjimin/digitalearthau | 2d3010be76fad0d0b6b4854dbbad07e98254b239 | 5098bf3c88627cad78a8caa5ab703c586c17a6f7 | refs/heads/develop | 2022-02-27T07:36:16.009689 | 2017-09-14T05:51:27 | 2017-09-14T05:51:27 | 103,460,937 | 0 | 0 | null | 2017-09-13T23:10:15 | 2017-09-13T23:10:15 | null | UTF-8 | Python | false | false | 7,353 | py | import collections
import uuid
from datetime import datetime
from typing import Iterable, Optional, Mapping, List
from datacube.index import index_connect
from datacube.index._api import Index
from datacube.model import Dataset
from datacube.scripts import dataset as dataset_script
from datacube.utils import uri_to_local_path
from digitalearthau.utils import simple_object_repr
class DatasetLite:
    """
    Lightweight stand-in for ``datacube.model.Dataset``.

    A full Dataset needs heavyweight initialisation (types, metadata, etc.),
    which makes testing awkward; this carries only the fields the sync code
    uses. Much of this script relies on the identity-by-id equality defined
    here, so keep __eq__/__hash__ semantics stable.
    """

    def __init__(self, id_: uuid.UUID, archived_time: datetime = None) -> None:
        # Guard the id type up front: equality comparisons silently misbehave
        # when a string uuid meets a uuid.UUID (this has happened before when
        # libraries switched representations).
        assert isinstance(id_, uuid.UUID)

        self.id = id_
        self.archived_time = archived_time

    @property
    def is_archived(self):
        """
        Whether this dataset has been archived.

        (an archived dataset is hidden from search results — e.g. it was
        superseded by another dataset — but remains reachable through
        provenance chains or direct id lookup)

        :rtype: bool
        """
        return self.archived_time is not None

    def __eq__(self, other):
        # Falsy values (None, etc.) never compare equal; otherwise identity
        # is by id alone.
        return bool(other) and self.id == other.id

    def __hash__(self):
        return hash(self.id)

    @classmethod
    def from_agdc(cls, dataset: Dataset):
        """Build a DatasetLite from a full datacube Dataset."""
        return DatasetLite(dataset.id, archived_time=dataset.archived_time)

    def __repr__(self):
        return simple_object_repr(self)
class DatasetPathIndex:
    """
    An index of datasets and their URIs.

    This is a slightly questionable attempt to make testing/mocking simpler.

    There's two implementations: One in-memory and one that uses a real datacube.
    (MemoryDatasetPathIndex and AgdcDatasetPathIndex)
    """

    def iter_all_uris(self, query: dict) -> Iterable[str]:
        """Yield every indexed uri matching the given dataset search query."""
        raise NotImplementedError

    def get_datasets_for_uri(self, uri: str) -> Iterable[DatasetLite]:
        """Yield all datasets recorded at the given uri."""
        raise NotImplementedError

    def get(self, dataset_id: uuid.UUID) -> Optional[DatasetLite]:
        """Look up a dataset by id; None if unknown to the index."""
        raise NotImplementedError

    def add_location(self, dataset: DatasetLite, uri: str) -> bool:
        """Attach a uri to an already-indexed dataset. False if it was already attached."""
        raise NotImplementedError

    def remove_location(self, dataset: DatasetLite, uri: str) -> bool:
        """Detach a uri from a dataset. False if it wasn't attached."""
        raise NotImplementedError

    def add_dataset(self, dataset: DatasetLite, uri: str):
        """Index a new dataset located at the given uri."""
        raise NotImplementedError

    def as_map(self) -> Mapping[DatasetLite, Iterable[str]]:
        """Map of all datasets to their uri list. Convenience method for tests"""
        raise NotImplementedError

    def close(self):
        """Do any clean-up as needed before forking."""
        # Default implementation: no-op
        pass
class AgdcDatasetPathIndex(DatasetPathIndex):
    """DatasetPathIndex backed by a real AGDC/datacube index connection."""

    def __init__(self, index: Index) -> None:
        super().__init__()
        self._index = index
        # Dataset-type matching rules: needed when (re)indexing datasets
        # straight from files on disk.
        self._rules = dataset_script.load_rules_from_types(self._index)

    @classmethod
    def connect(cls) -> 'AgdcDatasetPathIndex':
        """Open a fresh datacube connection for path syncing."""
        return cls(index_connect(application_name='digitalearthau-pathsync'))

    def iter_all_uris(self, query: dict) -> Iterable[str]:
        for uri, in self._index.datasets.search_returning(['uri'], **query):
            yield str(uri)

    def get_datasets_for_uri(self, uri: str) -> Iterable[DatasetLite]:
        for agdc_dataset in self._index.datasets.get_datasets_for_location(uri=uri):
            yield DatasetLite.from_agdc(agdc_dataset)

    def get(self, dataset_id: uuid.UUID) -> Optional[DatasetLite]:
        found = self._index.datasets.get(dataset_id)
        if not found:
            return None
        return DatasetLite.from_agdc(found)

    def add_location(self, dataset: DatasetLite, uri: str) -> bool:
        """Attach a uri to a dataset; False if already attached."""
        return self._index.datasets.add_location(dataset.id, uri)

    def remove_location(self, dataset: DatasetLite, uri: str) -> bool:
        """Detach a uri from a dataset; False if it wasn't attached."""
        return self._index.datasets.remove_location(dataset.id, uri)

    def add_dataset(self, dataset: DatasetLite, uri: str):
        """(Re)index the dataset stored at the given file uri."""
        path = uri_to_local_path(uri)

        matches = (d for d in dataset_script.load_datasets([path], self._rules)
                   if d.id == dataset.id)
        found = next(matches, None)
        if found is None:
            raise RuntimeError('Dataset not found at path: %s, %s' % (dataset.id, uri))

        self._index.datasets.add(found, sources_policy='ensure')

    def close(self):
        """Release the underlying datacube connection."""
        self._index.close()

    def as_map(self) -> Mapping[DatasetLite, Iterable[str]]:
        """
        All contained (dataset, [location]) values, to check test results.
        """
        return {DatasetLite(agdc_ds.id): tuple(agdc_ds.uris)
                for agdc_ds in self._index.datasets.search()}

    def __enter__(self):
        return self

    def __exit__(self, type_, value, traceback):
        self.close()
class MemoryDatasetPathIndex(DatasetPathIndex):
    """
    An in-memory implementation, so that we can test without using a real
    datacube index.
    """

    def __init__(self):
        super().__init__()
        # Map of dataset -> list of uris. Keys are never removed once added;
        # only their uri lists change.
        self._records = collections.defaultdict(list)  # type: Mapping[DatasetLite, List[str]]

    def reset(self):
        """Discard all stored datasets and locations."""
        self._records = collections.defaultdict(list)

    def get(self, dataset_id: uuid.UUID) -> Optional[DatasetLite]:
        """Linear scan by id; None if unknown."""
        for d in self._records.keys():
            if d.id == dataset_id:
                return d
        return None

    def iter_all_uris(self, query: dict) -> Iterable[str]:
        # NOTE: unlike the real index, `query` is ignored — every known uri
        # is yielded.
        for uris in self._records.values():
            yield from uris

    def add_location(self, dataset: DatasetLite, uri: str) -> bool:
        if dataset not in self._records:
            raise ValueError("Unknown dataset {} -> {}".format(dataset.id, uri))

        return self._add(dataset, uri)

    def _add(self, dataset, uri):
        """Record a uri for a dataset (creating it if needed). False if already present."""
        if uri in self._records[dataset]:
            # Not added
            return False

        self._records[dataset].append(uri)
        return True

    def remove_location(self, dataset: DatasetLite, uri: str) -> bool:
        # Use .get() rather than indexing: self._records is a defaultdict, so
        # a plain lookup of an unknown dataset would insert a phantom empty
        # entry (then visible via get()/as_map()) as a side effect of a
        # *failed* removal.
        uris = self._records.get(dataset)
        if not uris or uri not in uris:
            # Not removed
            return False
        # We never remove the dataset key, only the uris.
        uris.remove(uri)
        return True

    def get_datasets_for_uri(self, uri: str) -> Iterable[DatasetLite]:
        for dataset, uris in self._records.items():
            if uri in uris:
                yield dataset

    def as_map(self) -> Mapping[DatasetLite, Iterable[str]]:
        """
        All contained (dataset, [location]) values, to check test results.
        """
        return {id_: tuple(uris) for id_, uris in self._records.items()}

    def add_dataset(self, dataset: DatasetLite, uri: str):
        # We're not actually storing datasets...
        return self._add(dataset, uri)
| [
"[email protected]"
] | |
bb7d789c7df59f3ef3d4b7d31cc5b89a64bbb3c6 | 51cbd904e17e45f6adb5303c3532a6ff0519ab42 | /sdk/tables/azure-data-tables/tests/test_table_service_properties_cosmos.py | 139f3c1973a4a4d8f57e5f7f63813ae8c7bfbeef | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | heaths/azure-sdk-for-python | 203e9a6052d7dff5b5f2346bced86b9406be3419 | 77feaf14471eba6642f5c7ae2f3f06981ff361d7 | refs/heads/master | 2022-07-26T06:46:57.067502 | 2021-04-15T21:35:26 | 2021-04-15T21:35:26 | 239,629,447 | 0 | 0 | MIT | 2020-02-10T22:46:20 | 2020-02-10T22:46:19 | null | UTF-8 | Python | false | false | 9,896 | py | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import time
import pytest
from devtools_testutils import AzureTestCase
from azure.core.exceptions import HttpResponseError
from azure.data.tables import (
TableServiceClient,
TableAnalyticsLogging,
Metrics,
RetentionPolicy,
CorsRule
)
from _shared.testcase import TableTestCase
from preparers import CosmosPreparer
# ------------------------------------------------------------------------------
class TableServicePropertiesTest(AzureTestCase, TableTestCase):
    """Service-properties tests against a Cosmos table endpoint.

    Every live test is currently skipped because Cosmos Tables does not yet
    support service properties; the post-request delays are kept for when
    the skips are lifted.
    """

    # Seconds to wait after a live mutation so the service settles before the
    # next request. (The tests previously called `sleep(SLEEP_DELAY)` with
    # both names undefined — a NameError in any live run; 30 matches the one
    # concrete delay the file used.)
    SLEEP_DELAY = 30

    # --Helpers-----------------------------------------------------------------
    def _assert_properties_default(self, prop):
        """Assert `prop` matches a freshly-created service's default properties."""
        assert prop is not None

        self._assert_logging_equal(prop['analytics_logging'], TableAnalyticsLogging())
        self._assert_metrics_equal(prop['hour_metrics'], Metrics())
        self._assert_metrics_equal(prop['minute_metrics'], Metrics())
        self._assert_cors_equal(prop['cors'], list())

    def _assert_logging_equal(self, log1, log2):
        """Assert two TableAnalyticsLogging objects are equivalent (None matches None)."""
        if log1 is None or log2 is None:
            assert log1 == log2
            return

        assert log1.version == log2.version
        assert log1.read == log2.read
        assert log1.write == log2.write
        assert log1.delete == log2.delete
        self._assert_retention_equal(log1.retention_policy, log2.retention_policy)

    def _assert_delete_retention_policy_equal(self, policy1, policy2):
        """Assert two delete-retention policies are equivalent (None matches None)."""
        if policy1 is None or policy2 is None:
            assert policy1 == policy2
            return

        assert policy1.enabled == policy2.enabled
        assert policy1.days == policy2.days

    def _assert_static_website_equal(self, prop1, prop2):
        """Assert two static-website property objects are equivalent (None matches None)."""
        if prop1 is None or prop2 is None:
            assert prop1 == prop2
            return

        assert prop1.enabled == prop2.enabled
        assert prop1.index_document == prop2.index_document
        assert prop1.error_document404_path == prop2.error_document404_path

    def _assert_delete_retention_policy_not_equal(self, policy1, policy2):
        """Assert two delete-retention policies differ in at least one field."""
        if policy1 is None or policy2 is None:
            assert policy1 != policy2
            return

        assert not (policy1.enabled == policy2.enabled and policy1.days == policy2.days)

    def _assert_metrics_equal(self, metrics1, metrics2):
        """Assert two Metrics objects are equivalent (None matches None)."""
        if metrics1 is None or metrics2 is None:
            assert metrics1 == metrics2
            return

        assert metrics1.version == metrics2.version
        assert metrics1.enabled == metrics2.enabled
        assert metrics1.include_apis == metrics2.include_apis
        self._assert_retention_equal(metrics1.retention_policy, metrics2.retention_policy)

    def _assert_cors_equal(self, cors1, cors2):
        """Assert two CORS rule lists are pairwise equivalent (None matches None)."""
        if cors1 is None or cors2 is None:
            assert cors1 == cors2
            return

        assert len(cors1) == len(cors2)
        for i in range(0, len(cors1)):
            rule1 = cors1[i]
            rule2 = cors2[i]
            assert len(rule1.allowed_origins) == len(rule2.allowed_origins)
            assert len(rule1.allowed_methods) == len(rule2.allowed_methods)
            assert rule1.max_age_in_seconds == rule2.max_age_in_seconds
            assert len(rule1.exposed_headers) == len(rule2.exposed_headers)
            assert len(rule1.allowed_headers) == len(rule2.allowed_headers)

    def _assert_retention_equal(self, ret1, ret2):
        """Assert two retention policies have identical settings."""
        assert ret1.enabled == ret2.enabled
        assert ret1.days == ret2.days

    # --Test cases per service ---------------------------------------
    @pytest.mark.skip("Cosmos Tables does not yet support service properties")
    @CosmosPreparer()
    def test_table_service_properties(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
        """Setting all-default properties succeeds and round-trips."""
        # Arrange
        url = self.account_url(tables_cosmos_account_name, "cosmos")
        tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
        # Act
        resp = tsc.set_service_properties(
            analytics_logging=TableAnalyticsLogging(),
            hour_metrics=Metrics(),
            minute_metrics=Metrics(),
            cors=list())

        # Assert
        assert resp is None
        self._assert_properties_default(tsc.get_service_properties())

        if self.is_live:
            time.sleep(self.SLEEP_DELAY)

    # --Test cases per feature ---------------------------------------
    @pytest.mark.skip("Cosmos Tables does not yet support service properties")
    @CosmosPreparer()
    def test_set_logging(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
        """Analytics-logging settings round-trip through the service."""
        # Arrange
        url = self.account_url(tables_cosmos_account_name, "cosmos")
        tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
        logging = TableAnalyticsLogging(read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5))

        # Act
        tsc.set_service_properties(analytics_logging=logging)

        # Assert
        received_props = tsc.get_service_properties()
        self._assert_logging_equal(received_props['analytics_logging'], logging)

        if self.is_live:
            time.sleep(self.SLEEP_DELAY)

    @pytest.mark.skip("Cosmos Tables does not yet support service properties")
    @CosmosPreparer()
    def test_set_hour_metrics(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
        """Hour-metrics settings round-trip through the service."""
        # Arrange
        url = self.account_url(tables_cosmos_account_name, "cosmos")
        tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
        hour_metrics = Metrics(enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5))

        # Act
        tsc.set_service_properties(hour_metrics=hour_metrics)

        # Assert
        received_props = tsc.get_service_properties()
        self._assert_metrics_equal(received_props['hour_metrics'], hour_metrics)

        if self.is_live:
            time.sleep(self.SLEEP_DELAY)

    @pytest.mark.skip("Cosmos Tables does not yet support service properties")
    @CosmosPreparer()
    def test_set_minute_metrics(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
        """Minute-metrics settings round-trip through the service."""
        # Arrange
        url = self.account_url(tables_cosmos_account_name, "cosmos")
        tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
        minute_metrics = Metrics(enabled=True, include_apis=True,
                                 retention_policy=RetentionPolicy(enabled=True, days=5))

        # Act
        tsc.set_service_properties(minute_metrics=minute_metrics)

        # Assert
        received_props = tsc.get_service_properties()
        self._assert_metrics_equal(received_props['minute_metrics'], minute_metrics)

        if self.is_live:
            time.sleep(self.SLEEP_DELAY)

    @pytest.mark.skip("Cosmos Tables does not yet support service properties")
    @CosmosPreparer()
    def test_set_cors(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
        """CORS rules (simple and fully-specified) round-trip through the service."""
        # Arrange
        url = self.account_url(tables_cosmos_account_name, "cosmos")
        tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
        cors_rule1 = CorsRule(['www.xyz.com'], ['GET'])

        allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"]
        allowed_methods = ['GET', 'PUT']
        max_age_in_seconds = 500
        exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"]
        allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"]
        cors_rule2 = CorsRule(
            allowed_origins,
            allowed_methods,
            max_age_in_seconds=max_age_in_seconds,
            exposed_headers=exposed_headers,
            allowed_headers=allowed_headers)

        cors = [cors_rule1, cors_rule2]

        # Act
        tsc.set_service_properties(cors=cors)

        # Assert
        received_props = tsc.get_service_properties()
        self._assert_cors_equal(received_props['cors'], cors)

        if self.is_live:
            time.sleep(self.SLEEP_DELAY)

    # --Test cases for errors ---------------------------------------
    @pytest.mark.skip("Cosmos Tables does not yet support service properties")
    @CosmosPreparer()
    def test_too_many_cors_rules(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
        """The service rejects more than five CORS rules."""
        # Arrange
        tsc = TableServiceClient(self.account_url(tables_cosmos_account_name, "cosmos"), tables_primary_cosmos_account_key)
        cors = []
        for i in range(0, 6):
            cors.append(CorsRule(['www.xyz.com'], ['GET']))

        # Assert
        pytest.raises(HttpResponseError,
                      tsc.set_service_properties, None, None, None, cors)

        if self.is_live:
            time.sleep(self.SLEEP_DELAY)

    @pytest.mark.skip("Cosmos Tables does not yet support service properties")
    @CosmosPreparer()
    def test_retention_too_long(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
        """The service rejects retention periods longer than 365 days."""
        # Arrange
        tsc = TableServiceClient(self.account_url(tables_cosmos_account_name, "cosmos"), tables_primary_cosmos_account_key)
        minute_metrics = Metrics(enabled=True, include_apis=True,
                                 retention_policy=RetentionPolicy(enabled=True, days=366))

        # Assert
        pytest.raises(HttpResponseError,
                      tsc.set_service_properties,
                      None, None, minute_metrics)

        if self.is_live:
            time.sleep(self.SLEEP_DELAY)
class TestTableUnitTest(TableTestCase):
    """Pure unit tests that need no service connection."""

    def test_retention_no_days(self):
        """RetentionPolicy must reject enabled=True without a day count."""
        with pytest.raises(ValueError):
            RetentionPolicy(True, None)
| [
"[email protected]"
] | |
15b6ae2d70b9799cb8748159e727ba2aff01ca67 | a7b4bd1db26f71ab941076691d894583e167a3fd | /tools/cli_auto_doc.py | 3fa4e46f23cc9b1663fdece8826ea5510b80263b | [
"Apache-2.0"
] | permissive | Mirantis/stackalytics | c422ccb27baa3f1fd7e68b9732ba0203144a3657 | 96ec7c6c630a9f2532b808069e045d434bbac200 | refs/heads/master | 2021-01-18T21:58:38.904481 | 2017-01-25T11:14:12 | 2017-01-25T11:14:12 | 10,863,780 | 3 | 4 | Apache-2.0 | 2020-02-26T11:45:53 | 2013-06-22T11:17:28 | Python | UTF-8 | Python | false | false | 1,806 | py | # Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
try:
import ConfigParser as configparser
except ImportError:
import configparser
def split_multiline(value):
    """Split a multi-line option value into stripped, non-empty lines."""
    stripped = (line.strip() for line in value.split('\n'))
    return [line for line in stripped if line]
def get_entry_points(config):
    """Return the [entry_points] section as {option: [stripped lines]}.

    An empty dict is returned when the section is absent.
    """
    if 'entry_points' not in config:
        return {}
    # Each option value is a multi-line string; keep stripped, non-empty lines.
    return {
        option: [line.strip() for line in value.split('\n') if line.strip()]
        for option, value in config['entry_points'].items()
    }
def make(cfg, dest):
    """Run `<tool> --help` for every console script declared in *cfg*,
    capturing each tool's output into `<dest>/<tool>.txt`."""
    parser = configparser.RawConfigParser()
    parser.read(cfg)

    config = {section: dict(parser.items(section))
              for section in parser.sections()}

    console_scripts = get_entry_points(config).get('console_scripts')
    if not console_scripts:
        return
    for item in console_scripts:
        # Entry-point lines look like "tool-name = package.module:func".
        tool = item.split('=')[0].strip()
        print('Running %s' % tool)
        # NOTE: tool/dest are interpolated into a shell command unquoted;
        # acceptable for a trusted dev tool reading its own setup.cfg, but
        # not safe for untrusted input.
        os.system('%(tool)s --help > %(dest)s/%(tool)s.txt'
                  % dict(tool=tool, dest=dest))
# CLI entry: the single required argument is the output directory for the
# generated per-tool help files.
if len(sys.argv) < 2:
    print('Usage: cli_auto_doc <dest folder>')
    sys.exit(1)
# NOTE(review): this runs on import as well — assumes the file is only ever
# executed as a script (there is no `if __name__ == '__main__'` guard).
print('Generating docs from help to console tools')
# setup.cfg is read from the current working directory.
make(cfg='setup.cfg', dest=sys.argv[1])
| [
"[email protected]"
] | |
58893a54c197fb68eeb0d035302bf64d8d6e53e9 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/3/gD3.py | aa7152104068969fce4fab0f59d40adbf339df10 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'gD3':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
] | |
1a8b3763c8a94e48cf8da659c686babc72716600 | 80abe7427ca501da06a9507cefa52d5c290f2833 | /Chapter04/topic_modeling.py | 841891d56168915143ec57282aeab11713c75372 | [] | no_license | CodedQuen/Raspberry-Pi-3-Cookbook-for-Python-Programmers | 7910c9cf9ebaf6f42510bd531bf965fd03e6efe8 | 4a77452c4510fd9c7da62099a93fdbc95a86245a | refs/heads/master | 2022-06-10T04:36:59.316284 | 2020-05-05T10:18:33 | 2020-05-05T10:18:33 | 261,421,883 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,220 | py |
from nltk.tokenize import RegexpTokenizer
from nltk.stem.snowball import SnowballStemmer
from gensim import models, corpora
from nltk.corpus import stopwords
# Load input words
def load_words(in_file):
    """Load input documents from a file, one document per line.

    Strips only the trailing newline from each line, so the final line stays
    intact even when the file has no terminating newline (slicing `line[:-1]`
    would silently chop its last character).

    :param in_file: path of the text file to read
    :return: list of lines without trailing newlines
    """
    lines = []
    with open(in_file, 'r') as f:
        for line in f:
            lines.append(line.rstrip('\n'))
    return lines
# Class to preprocedure of text
# Class to preprocedure of text
class Preprocedure(object):
    """Text preprocessor: tokenize, drop English stop words, then stem."""

    def __init__(self):
        """Build the tokenizer, stop-word list and stemmer once for reuse."""
        # Regex tokenizer over word characters.
        self.tokenizer = RegexpTokenizer(r'\w+')
        # English stop-word list from the NLTK corpus.
        self.english_stop_words = stopwords.words('english')
        # Snowball stemmer for English.
        self.snowball_stemmer = SnowballStemmer('english')

    def procedure(self, in_data):
        """Return the stemmed, stop-word-free tokens of `in_data`."""
        stem = self.snowball_stemmer.stem
        tokens = self.tokenizer.tokenize(in_data.lower())
        return [stem(token) for token in tokens
                if token not in self.english_stop_words]
# NOTE(review): the `print` statements below are Python 2 syntax — this
# script will not run under Python 3 as written.
if __name__=='__main__':
    # File containing linewise input data (one document per line)
    in_file = 'data_topic_modeling.txt'
    # Load words
    element = load_words(in_file)
    # Create a preprocedure object
    preprocedure = Preprocedure()
    # Create a list for processed documents
    processed_tokens = [preprocedure.procedure(x) for x in element]
    # Create a dictionary based on the tokenized documents
    dict_tokens = corpora.Dictionary(processed_tokens)
    # Bag-of-words representation of each document
    corpus = [dict_tokens.doc2bow(text) for text in processed_tokens]
    # Generate the LDA model based on the corpus we just created
    num_of_topics = 2
    num_of_words = 4
    # passes=25: number of training iterations over the whole corpus
    ldamodel = models.ldamodel.LdaModel(corpus,
            num_topics=num_of_topics, id2word=dict_tokens, passes=25)
    print "Most contributing words to the topics:"
    for item in ldamodel.print_topics(num_topics=num_of_topics, num_words=num_of_words):
        print "\nTopic", item[0], "==>", item[1]
| [
"[email protected]"
] | |
19ebe0e0280c2829c58678866cdc0eb3a1da7611 | bbe6f37f7347cb83f08846d505ac4aa6bc0031e6 | /purity_fb/purity_fb_1dot9/apis/arrays_api.py | 7bd8335e17e2cf1d85d4d5361d2a51ff900af97e | [
"Apache-2.0"
] | permissive | bsamz-ps/purity_fb_python_client | 02ff7213075cf1948e2db7b0835cc5fcc56f328a | 11f27ef0c72d8aac1fc4e1ed036cca038b85dfa4 | refs/heads/master | 2021-02-19T08:11:04.042758 | 2020-02-12T23:56:08 | 2020-02-12T23:56:08 | 245,294,511 | 0 | 0 | NOASSERTION | 2020-03-06T00:14:27 | 2020-03-06T00:14:26 | null | UTF-8 | Python | false | false | 43,419 | py | # coding: utf-8
"""
Pure Storage FlashBlade REST 1.9 Python SDK
Pure Storage FlashBlade REST 1.9 Python SDK. Compatible with REST API versions 1.0 - 1.9. Developed by [Pure Storage, Inc](http://www.purestorage.com/). Documentations can be found at [purity-fb.readthedocs.io](http://purity-fb.readthedocs.io/).
OpenAPI spec version: 1.9
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ArraysApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def list_arrays(self, **kwargs):
"""
List arrays
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: ArrayResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_with_http_info(**kwargs)
else:
(data) = self.list_arrays_with_http_info(**kwargs)
return data
    def list_arrays_with_http_info(self, **kwargs):
        """
        List arrays
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.list_arrays_with_http_info(callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :return: ArrayResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of keyword arguments this endpoint accepts.
        all_params = []
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # `params` is a snapshot of this frame's locals used purely as a plain
        # dict; writing to it does not alter the real local variables.
        params = locals()
        # Reject unexpected kwargs loudly so caller typos don't pass silently.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_arrays" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # This GET endpoint sends no request body.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # Authentication setting
        auth_settings = ['AuthTokenHeader']
        # Delegate the HTTP round-trip (and async/callback handling) to the
        # shared ApiClient.
        return self.api_client.call_api('/1.9/arrays', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ArrayResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def list_arrays_http_specific_performance(self, **kwargs):
"""
List instant or historical http specific performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_http_specific_performance(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:return: ArrayHttpPerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_http_specific_performance_with_http_info(**kwargs)
else:
(data) = self.list_arrays_http_specific_performance_with_http_info(**kwargs)
return data
    def list_arrays_http_specific_performance_with_http_info(self, **kwargs):
        """
        List instant or historical http specific performance
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.list_arrays_http_specific_performance_with_http_info(callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int start_time: Time to start sample in milliseconds since epoch.
        :param int end_time: Time to end sample in milliseconds since epoch.
        :param int resolution: sample frequency in milliseconds
        :return: ArrayHttpPerformanceResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of keyword arguments this endpoint accepts.
        all_params = ['start_time', 'end_time', 'resolution']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # `params` is a snapshot of this frame's locals used purely as a plain
        # dict; writing to it does not alter the real local variables.
        params = locals()
        # Reject unexpected kwargs loudly so caller typos don't pass silently.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_arrays_http_specific_performance" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        # Only forward the optional sampling parameters the caller supplied.
        query_params = []
        if 'start_time' in params:
            query_params.append(('start_time', params['start_time']))
        if 'end_time' in params:
            query_params.append(('end_time', params['end_time']))
        if 'resolution' in params:
            query_params.append(('resolution', params['resolution']))
        header_params = {}
        form_params = []
        local_var_files = {}
        # This GET endpoint sends no request body.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = ['AuthTokenHeader']
        # Delegate the HTTP round-trip (and async/callback handling) to the
        # shared ApiClient.
        return self.api_client.call_api('/1.9/arrays/http-specific-performance', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ArrayHttpPerformanceResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def list_arrays_nfs_specific_performance(self, **kwargs):
"""
List instant or historical nfs specific performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_nfs_specific_performance(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:return: ArrayNfsPerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_nfs_specific_performance_with_http_info(**kwargs)
else:
(data) = self.list_arrays_nfs_specific_performance_with_http_info(**kwargs)
return data
    def list_arrays_nfs_specific_performance_with_http_info(self, **kwargs):
        """
        List instant or historical nfs specific performance
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.list_arrays_nfs_specific_performance_with_http_info(callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int start_time: Time to start sample in milliseconds since epoch.
        :param int end_time: Time to end sample in milliseconds since epoch.
        :param int resolution: sample frequency in milliseconds
        :return: ArrayNfsPerformanceResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of keyword arguments this endpoint accepts.
        all_params = ['start_time', 'end_time', 'resolution']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # `params` is a snapshot of this frame's locals used purely as a plain
        # dict; writing to it does not alter the real local variables.
        params = locals()
        # Reject unexpected kwargs loudly so caller typos don't pass silently.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_arrays_nfs_specific_performance" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        # Only forward the optional sampling parameters the caller supplied.
        query_params = []
        if 'start_time' in params:
            query_params.append(('start_time', params['start_time']))
        if 'end_time' in params:
            query_params.append(('end_time', params['end_time']))
        if 'resolution' in params:
            query_params.append(('resolution', params['resolution']))
        header_params = {}
        form_params = []
        local_var_files = {}
        # This GET endpoint sends no request body.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = ['AuthTokenHeader']
        # Delegate the HTTP round-trip (and async/callback handling) to the
        # shared ApiClient.
        return self.api_client.call_api('/1.9/arrays/nfs-specific-performance', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ArrayNfsPerformanceResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def list_arrays_performance(self, **kwargs):
"""
List instant or historical array performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_performance(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:param str protocol: to sample performance of a certain protocol
:return: ArrayPerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_performance_with_http_info(**kwargs)
else:
(data) = self.list_arrays_performance_with_http_info(**kwargs)
return data
    def list_arrays_performance_with_http_info(self, **kwargs):
        """
        List instant or historical array performance
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.list_arrays_performance_with_http_info(callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param int start_time: Time to start sample in milliseconds since epoch.
        :param int end_time: Time to end sample in milliseconds since epoch.
        :param int resolution: sample frequency in milliseconds
        :param str protocol: to sample performance of a certain protocol
        :return: ArrayPerformanceResponse
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint keyword arguments plus the private framework switches
        # understood by ``api_client.call_api``.
        all_params = ['start_time', 'end_time', 'resolution', 'protocol']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any keyword argument the endpoint does not understand.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_arrays_performance" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        # Only forward the query parameters the caller actually supplied.
        query_params = []
        if 'start_time' in params:
            query_params.append(('start_time', params['start_time']))
        if 'end_time' in params:
            query_params.append(('end_time', params['end_time']))
        if 'resolution' in params:
            query_params.append(('resolution', params['resolution']))
        if 'protocol' in params:
            query_params.append(('protocol', params['protocol']))
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = ['AuthTokenHeader']
        # Transport, serialization and response handling are delegated to
        # the shared API client.
        return self.api_client.call_api('/1.9/arrays/performance', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ArrayPerformanceResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def list_arrays_performance_replication(self, **kwargs):
"""
List instant or historical array replication performance.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_performance_replication(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:param int start_time: Time to start sample in milliseconds since epoch.
:param str type: to sample space of either file systems, object store, or all
:return: ArrayPerformanceReplicationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_performance_replication_with_http_info(**kwargs)
else:
(data) = self.list_arrays_performance_replication_with_http_info(**kwargs)
return data
    def list_arrays_performance_replication_with_http_info(self, **kwargs):
        """
        List instant or historical array replication performance.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.list_arrays_performance_replication_with_http_info(callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param int end_time: Time to end sample in milliseconds since epoch.
        :param int resolution: sample frequency in milliseconds
        :param int start_time: Time to start sample in milliseconds since epoch.
        :param str type: to sample space of either file systems, object store, or all
        :return: ArrayPerformanceReplicationResponse
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint keyword arguments plus the private framework switches
        # understood by ``api_client.call_api``.
        all_params = ['end_time', 'resolution', 'start_time', 'type']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any keyword argument the endpoint does not understand.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_arrays_performance_replication" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        # Only forward the query parameters the caller actually supplied.
        query_params = []
        if 'end_time' in params:
            query_params.append(('end_time', params['end_time']))
        if 'resolution' in params:
            query_params.append(('resolution', params['resolution']))
        if 'start_time' in params:
            query_params.append(('start_time', params['start_time']))
        if 'type' in params:
            query_params.append(('type', params['type']))
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = ['AuthTokenHeader']
        # Transport, serialization and response handling are delegated to
        # the shared API client.
        return self.api_client.call_api('/1.9/arrays/performance/replication', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ArrayPerformanceReplicationResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def list_arrays_s3_specific_performance(self, **kwargs):
"""
List instant or historical object store specific performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_s3_specific_performance(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:return: ArrayS3PerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_s3_specific_performance_with_http_info(**kwargs)
else:
(data) = self.list_arrays_s3_specific_performance_with_http_info(**kwargs)
return data
    def list_arrays_s3_specific_performance_with_http_info(self, **kwargs):
        """
        List instant or historical object store specific performance
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.list_arrays_s3_specific_performance_with_http_info(callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param int start_time: Time to start sample in milliseconds since epoch.
        :param int end_time: Time to end sample in milliseconds since epoch.
        :param int resolution: sample frequency in milliseconds
        :return: ArrayS3PerformanceResponse
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint keyword arguments plus the private framework switches
        # understood by ``api_client.call_api``.
        all_params = ['start_time', 'end_time', 'resolution']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any keyword argument the endpoint does not understand.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_arrays_s3_specific_performance" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        # Only forward the query parameters the caller actually supplied.
        query_params = []
        if 'start_time' in params:
            query_params.append(('start_time', params['start_time']))
        if 'end_time' in params:
            query_params.append(('end_time', params['end_time']))
        if 'resolution' in params:
            query_params.append(('resolution', params['resolution']))
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = ['AuthTokenHeader']
        # Transport, serialization and response handling are delegated to
        # the shared API client.
        return self.api_client.call_api('/1.9/arrays/s3-specific-performance', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ArrayS3PerformanceResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def list_arrays_space(self, **kwargs):
"""
List instant or historical array space
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_space(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:param str type: to sample space of either file systems, object store, or all
:return: ArraySpaceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_space_with_http_info(**kwargs)
else:
(data) = self.list_arrays_space_with_http_info(**kwargs)
return data
    def list_arrays_space_with_http_info(self, **kwargs):
        """
        List instant or historical array space
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.list_arrays_space_with_http_info(callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param int start_time: Time to start sample in milliseconds since epoch.
        :param int end_time: Time to end sample in milliseconds since epoch.
        :param int resolution: sample frequency in milliseconds
        :param str type: to sample space of either file systems, object store, or all
        :return: ArraySpaceResponse
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint keyword arguments plus the private framework switches
        # understood by ``api_client.call_api``.
        all_params = ['start_time', 'end_time', 'resolution', 'type']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any keyword argument the endpoint does not understand.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_arrays_space" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        # Only forward the query parameters the caller actually supplied.
        query_params = []
        if 'start_time' in params:
            query_params.append(('start_time', params['start_time']))
        if 'end_time' in params:
            query_params.append(('end_time', params['end_time']))
        if 'resolution' in params:
            query_params.append(('resolution', params['resolution']))
        if 'type' in params:
            query_params.append(('type', params['type']))
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = ['AuthTokenHeader']
        # Transport, serialization and response handling are delegated to
        # the shared API client.
        return self.api_client.call_api('/1.9/arrays/space', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ArraySpaceResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def list_clients_performance(self, **kwargs):
"""
List client performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_clients_performance(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param list[str] names: A comma-separated list of resource names. This cannot be provided together with the ids query parameters.
:param str filter: The filter to be used for query.
:param str sort: The way to order the results.
:param int limit: limit, should be >= 0
:return: ClientPerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_clients_performance_with_http_info(**kwargs)
else:
(data) = self.list_clients_performance_with_http_info(**kwargs)
return data
    def list_clients_performance_with_http_info(self, **kwargs):
        """
        List client performance
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.list_clients_performance_with_http_info(callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param list[str] names: A comma-separated list of resource names. This cannot be provided together with the ids query parameters.
        :param str filter: The filter to be used for query.
        :param str sort: The way to order the results.
        :param int limit: limit, should be >= 0
        :return: ClientPerformanceResponse
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint keyword arguments plus the private framework switches
        # understood by ``api_client.call_api``.
        all_params = ['names', 'filter', 'sort', 'limit']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any keyword argument the endpoint does not understand.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_clients_performance" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        # Only forward the query parameters the caller actually supplied.
        query_params = []
        if 'names' in params:
            query_params.append(('names', params['names']))
            # Serialize the list of names as one comma-separated value.
            collection_formats['names'] = 'csv'
        if 'filter' in params:
            query_params.append(('filter', params['filter']))
        if 'sort' in params:
            query_params.append(('sort', params['sort']))
        if 'limit' in params:
            query_params.append(('limit', params['limit']))
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = ['AuthTokenHeader']
        # Transport, serialization and response handling are delegated to
        # the shared API client.
        return self.api_client.call_api('/1.9/arrays/clients/performance', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ClientPerformanceResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def update_arrays(self, array_settings, **kwargs):
"""
Update arrays
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_arrays(array_settings, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param PureArray array_settings: (required)
:return: ArrayResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_arrays_with_http_info(array_settings, **kwargs)
else:
(data) = self.update_arrays_with_http_info(array_settings, **kwargs)
return data
    def update_arrays_with_http_info(self, array_settings, **kwargs):
        """
        Update arrays
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.update_arrays_with_http_info(array_settings, callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param PureArray array_settings: (required)
        :return: ArrayResponse
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint keyword arguments plus the private framework switches
        # understood by ``api_client.call_api``.
        all_params = ['array_settings']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any keyword argument the endpoint does not understand.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_arrays" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'array_settings' is set
        if ('array_settings' not in params) or (params['array_settings'] is None):
            raise ValueError("Missing the required parameter `array_settings` when calling `update_arrays`")
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The settings object travels as the JSON request body.
        if 'array_settings' in params:
            body_params = params['array_settings']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # Authentication setting
        auth_settings = ['AuthTokenHeader']
        # PATCH semantics: partial update of the array resource.
        return self.api_client.call_api('/1.9/arrays', 'PATCH',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ArrayResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
| [
"[email protected]"
] | |
25b980a0be5f061c6bdc488b9c6e51969e8a81c7 | ceb5b7c3882b2bf3f53219356e914462c680f059 | /azure-mgmt-compute/azure/mgmt/compute/containerservice/v2017_01_31/models/container_service_client_enums.py | 279f5dcb9d3ff37bd26b6e9a9c88b555f28c3dff | [
"MIT"
] | permissive | codalab/azure-sdk-for-python | b712da2a377cfa526e0ffa4fa40408e6a81e48e3 | f4c92d02d46fcdee9da430a18a394b108a2f8920 | refs/heads/master | 2021-01-19T14:40:23.567035 | 2017-04-11T22:49:13 | 2017-04-11T22:49:13 | 88,180,409 | 1 | 0 | null | 2017-04-13T15:36:45 | 2017-04-13T15:36:44 | null | UTF-8 | Python | false | false | 2,291 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from enum import Enum
class ContainerServiceOchestratorTypes(Enum):
    """Orchestrator back-ends selectable for an Azure Container Service.

    NOTE: the misspelling 'Ochestrator' is emitted by the AutoRest code
    generator and is part of the public API surface, so it must not be
    corrected here.  Values are the exact strings expected by the ARM API.
    """

    swarm = "Swarm"
    dcos = "DCOS"
    custom = "Custom"
    kubernetes = "Kubernetes"
class ContainerServiceVMSizeTypes(Enum):
    """Azure VM sizes accepted for Container Service agent/master nodes.

    Values are the exact size strings expected by the ARM API; member
    names and values must stay in sync with the service definition.
    """

    # A-series (basic/general purpose)
    standard_a0 = "Standard_A0"
    standard_a1 = "Standard_A1"
    standard_a2 = "Standard_A2"
    standard_a3 = "Standard_A3"
    standard_a4 = "Standard_A4"
    standard_a5 = "Standard_A5"
    standard_a6 = "Standard_A6"
    standard_a7 = "Standard_A7"
    standard_a8 = "Standard_A8"
    standard_a9 = "Standard_A9"
    standard_a10 = "Standard_A10"
    standard_a11 = "Standard_A11"
    # D-series
    standard_d1 = "Standard_D1"
    standard_d2 = "Standard_D2"
    standard_d3 = "Standard_D3"
    standard_d4 = "Standard_D4"
    standard_d11 = "Standard_D11"
    standard_d12 = "Standard_D12"
    standard_d13 = "Standard_D13"
    standard_d14 = "Standard_D14"
    # Dv2-series
    standard_d1_v2 = "Standard_D1_v2"
    standard_d2_v2 = "Standard_D2_v2"
    standard_d3_v2 = "Standard_D3_v2"
    standard_d4_v2 = "Standard_D4_v2"
    standard_d5_v2 = "Standard_D5_v2"
    standard_d11_v2 = "Standard_D11_v2"
    standard_d12_v2 = "Standard_D12_v2"
    standard_d13_v2 = "Standard_D13_v2"
    standard_d14_v2 = "Standard_D14_v2"
    # G-series
    standard_g1 = "Standard_G1"
    standard_g2 = "Standard_G2"
    standard_g3 = "Standard_G3"
    standard_g4 = "Standard_G4"
    standard_g5 = "Standard_G5"
    # DS-series (premium storage)
    standard_ds1 = "Standard_DS1"
    standard_ds2 = "Standard_DS2"
    standard_ds3 = "Standard_DS3"
    standard_ds4 = "Standard_DS4"
    standard_ds11 = "Standard_DS11"
    standard_ds12 = "Standard_DS12"
    standard_ds13 = "Standard_DS13"
    standard_ds14 = "Standard_DS14"
    # GS-series (premium storage)
    standard_gs1 = "Standard_GS1"
    standard_gs2 = "Standard_GS2"
    standard_gs3 = "Standard_GS3"
    standard_gs4 = "Standard_GS4"
    standard_gs5 = "Standard_GS5"
| [
"[email protected]"
] | |
825006c894ca28563ceb49ebb22caa4eb6aead20 | 4e0c1615c467c63524db9a33d0e769f1370f5a12 | /python-ops/training/exercise/test/murthy/r30.py | ae1cc0288d7548781efef0cae97dc498836eb388 | [] | no_license | infra-ops/cloud-ops | 1afb44ed29000491aaa5420ebc6e0b8d740fc55c | 4f676fde13f33c838f7f17affd705966a6d31da2 | refs/heads/master | 2023-08-27T03:00:32.867645 | 2023-08-23T14:27:59 | 2023-08-23T14:27:59 | 140,283,053 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 64 | py | values = [100,200,300,400]
# Restated here so the snippet is self-contained.
values = [100, 200, 300, 400]
# Renamed from 'slice': that name shadowed the built-in slice type.
middle = values[1:3]
print(middle)
| [
"[email protected]"
] | |
985dad9eac8bbe27fa5b3adfb04734809e871ce4 | ae16f9dd815605e5f52f27dda77bd735abafb587 | /parser/councilors/elections_config.py | 1bf0e2a7db9c45f4024b1026e1cd6c38e1f368c0 | [
"CC0-1.0"
] | permissive | travishen/councilor-voter-guide | aa4a1aa3b86db9ca40b291baf461ff0330a369c0 | 09d9365676335854b2d4d0981f5cb925adf4c958 | refs/heads/master | 2020-04-13T10:09:07.688276 | 2018-11-28T14:51:05 | 2018-11-28T14:51:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,827 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import sys
sys.path.append('../')
import re
import json
import psycopg2
import ast
from sys import argv
import gspread
from oauth2client.service_account import ServiceAccountCredentials
import db_settings
# Shared PostgreSQL connection and cursor used by every statement below.
conn = db_settings.con()
c = conn.cursor()
# argv[1] is a dict literal on the command line, e.g. "{'election_year': '2018'}".
election_year = ast.literal_eval(argv[1])['election_year']
def parse_districts(county, districts):
    '''Split a raw district description into bare district names.

    Returns a 2-tuple ``(names, category)`` where ``category`` is the
    trailing aborigine-constituency marker when present, else None.
    '''
    # Drop the boilerplate prefix, then pull off any aborigine-category
    # suffix before tokenizing the remainder.
    districts = re.sub(u'^(居住|【)', '', districts)
    matched = re.search(u'(平地原住民|山地原住民)$', districts)
    districts = re.sub(u'(平地原住民|山地原住民)$', '', districts)
    category = matched.group() if matched else None
    districts = re.sub(u'(】|之)', '', districts)
    parsed = []
    if districts:
        county_stem = re.sub(u'[縣市]$', '', county)
        for token in districts.split(u'、'):
            if len(token) == 2:
                # Two-character tokens are already bare names: keep the
                # raw split untouched and stop normalizing.
                parsed = districts.split(u'、')
                break
            if re.search(county_stem, token):
                parsed.append(token)
            else:
                # Strip the administrative suffix (鄉/鎮/市/區).
                parsed.append(re.sub(u'[鄉鎮市區]$', '', token))
    return parsed, category
# update constituencies
# Build a per-county structure of constituency regions from the published
# JSON, mirroring each region's district label into candidates_terms.
constituencies = json.load(open('../../voter_guide/static/json/dest/constituencies_%s.json' % election_year))
counties = {}
for region in constituencies:
    if region['county'] not in counties.keys():
        counties.update({
            region['county']: {
                'regions': [],
                'duplicated': []
            }
        })
    districts_list, category = parse_districts(region['county'], region['district'])
    if category:
        if districts_list:
            district = u'%s(%s)' % (category, u'、'.join(districts_list))
        else:
            # No explicit districts: the category spans the whole county.
            district = u'%s(%s)' % (category, u'全%s' % region['county'])
    else:
        district = u'、'.join(districts_list)
    counties[region['county']]['regions'].append({
        'constituency': region['constituency'],
        'districts_list': districts_list,
        'district': district,
        'category': category
    })
    c.execute('''
        update candidates_terms
        set district = %s
        where election_year = %s and county = %s and constituency = %s
    ''', (district, election_year, region['county'], region['constituency']))
# Pull seat counts per constituency from the shared Google spreadsheet
# (worksheet 議員) and merge them into the structure built above.
scope = ['https://spreadsheets.google.com/feeds']
credentials = ServiceAccountCredentials.from_json_keyfile_name('credential.json', scope)
gc = gspread.authorize(credentials)
sh = gc.open_by_key('10zFDmMF9CJDXSIENXO8iJXKE5CLBY62i_mSeqe_qDug')
worksheets = sh.worksheets()
for wks in worksheets:
    rows = wks.get_all_records()
    if wks.title == u'議員':
        for row in rows:
            print row['county'], row['constituency']
            if row['count_this']:
                # Constituency numbers are 1-based in the sheet; the
                # regions list built above is 0-based.
                counties[row['county']]['regions'][int(row['constituency'])-1]['elected_count_pre'] = row['count_pre']
                counties[row['county']]['regions'][int(row['constituency'])-1]['elected_count'] = row['count_this']
                counties[row['county']]['regions'][int(row['constituency'])-1]['reserved_seats'] = row['reserved_seats']
            else:
                continue
# Upsert the aggregated constituency data, merging with any JSON already
# stored for this election year.
config = json.dumps({'constituencies': counties})
c.execute('''
    INSERT INTO elections_elections(id, data)
    VALUES (%s, %s)
    ON CONFLICT (id)
    DO UPDATE
    SET data = (COALESCE(elections_elections.data, '{}'::jsonb) || %s::jsonb)
''', [election_year, config, config])
conn.commit()
# update constituency_change
district_versions = json.load(open('../district_versions.json'))
config = json.dumps({'constituency_change': district_versions.get(election_year, {})})
c.execute('''
    INSERT INTO elections_elections(id, data)
    VALUES (%s, %s)
    ON CONFLICT (id)
    DO UPDATE
    SET data = (COALESCE(elections_elections.data, '{}'::jsonb) || %s::jsonb)
''', [election_year, config, config])
conn.commit()
| [
"[email protected]"
] | |
88842d784deeecde1c87e82ab837462e8ead03f9 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /ZZsnGAjYLyosG9zmH_12.py | aa9e707c08223592b4481ac84b90ac438ecda630 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | py |
def flash(fc):
    """Evaluate a flash-card triple ``(operand, operator, operand)``.

    Supported operators: ``'+'``, ``'-'``, ``'x'`` and ``'/'`` (quotient
    rounded to 2 decimal places).  Returns ``None`` for an unknown
    operator, or for a division whose divisor is zero/falsy, matching
    the original contract.  Replaces an unreadable five-way nested
    conditional-expression chain with guard clauses.
    """
    var1, oper, var2 = fc
    if oper == '+':
        return var1 + var2
    if oper == '-':
        return var1 - var2
    if oper == 'x':
        return var1 * var2
    if oper == '/' and var2:
        return round(var1 / var2, 2)
    return None
| [
"[email protected]"
] | |
3f37df8301b6e1dbb044c648cb837c0f03ffdbc6 | a1582cec6239f627c6740b391d751f429675ee39 | /test_todo.py | 039a3c22c18438751c553f7c5c877b02e940182e | [] | no_license | SolbiatiAlessandro/todos | 7cabfd35d6c7d3cdd3232051be4a96c667d55f21 | b85e74c4fc220dccc5a0a05a288465b2da98f6d0 | refs/heads/master | 2020-03-28T18:56:09.847298 | 2018-10-15T15:07:01 | 2018-10-15T15:07:01 | 148,928,531 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 638 | py | import unittest
import todo
from os import path
# Absolute directory of this test file, so the 'todos' data file is found
# regardless of the current working directory.
dir_path = path.dirname(path.realpath(__file__))
class testTODO( unittest.TestCase ):
    def test_readElems( self ):
        # readElems() must always return a non-None collection.
        self.assertIsNotNone( todo.readElems() )
    def test_todoDone( self ):
        # NOTE(review): this appends to the real 'todos' file next to the
        # module and relies on todoDone() to consume the entry -- it
        # mutates on-disk state instead of using a fixture; confirm this
        # is intentional.
        with open(dir_path+'/todos','a') as f:
            f.write('"[test elem]" 0')
        #import pdb;pdb.set_trace()
        elems = todo.readElems()
        # Presumably readElems() yields pairs whose second field is the
        # todo text -- verify against todo.py.
        self.assertEqual( "[test elem]", elems[0][1] )
        todo.todoDone()
        elems = todo.readElems()
        # After todoDone() the test element must no longer be first.
        self.assertNotEqual( "[test elem]", elems[0][1] )
if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
] | |
bec5d5fbb09b6260d514209bc438f344d215832b | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-sblp/sblp_ut=3.5_rd=1_rw=0.04_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=30/sched.py | a85202e958d39e172c17afa700742b708255c6d6 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 495 | py | -S 0 -X RUN -Q 0 -L 2 84 250
-S 1 -X RUN -Q 0 -L 2 80 250
-S 0 -X RUN -Q 0 -L 2 74 250
-S 0 -X RUN -Q 0 -L 2 59 250
-S 2 -X RUN -Q 1 -L 1 57 200
-S 2 -X RUN -Q 1 -L 1 48 175
-S 2 -X RUN -Q 1 -L 1 40 125
-S 2 -X RUN -Q 1 -L 1 33 300
-S 3 -X RUN -Q 2 -L 1 29 100
-S 3 -X RUN -Q 2 -L 1 27 125
-S 3 -X RUN -Q 2 -L 1 21 100
-S 3 -X RUN -Q 2 -L 1 19 150
-S 4 -X RUN -Q 3 -L 1 19 100
-S 4 -X RUN -Q 3 -L 1 15 100
-S 4 -X RUN -Q 3 -L 1 14 100
| [
"[email protected]"
] | |
c2abb820a33634fbd4d2baa8cc40894fd5ffc9db | afea9757be324c8def68955a12be11d71ce6ad35 | /willyanealves/customer_service/migrations/0018_remove_customerservice_serviceitem.py | 5389c0887e5e5598bfdb43884190c5126c6d8681 | [] | no_license | bergpb/willyane-alves | c713cac3ec3a68005f3b8145985693d2477ba706 | 8b2b9922ba35bf2043f2345228f03d80dbd01098 | refs/heads/master | 2023-02-10T19:57:50.893172 | 2021-01-11T16:17:14 | 2021-01-11T16:17:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 368 | py | # Generated by Django 3.1.2 on 2020-11-16 14:18
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('customer_service', '0017_auto_20201116_1115'),
]
operations = [
migrations.RemoveField(
model_name='customerservice',
name='serviceitem',
),
]
| [
"[email protected]"
] | |
4968c79739666e8ad2edd13f77a4d5034eea9c2e | 649bd422025e421d86025743eac324c9b882a2e8 | /exam/1_three-dimensional_atomic_system/dump/phasetrans/temp45_9000.py | 4d4f26e616aa9b6ced1b2bb6742aa541c91b1e07 | [] | no_license | scheuclu/atom_class | 36ddee1f6a5995872e858add151c5942c109847c | 0c9a8c63d9b38898c1869fe8983126cef17662cd | refs/heads/master | 2021-01-21T10:52:28.448221 | 2017-03-07T23:04:41 | 2017-03-07T23:04:41 | 83,489,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68,965 | py | ITEM: TIMESTEP
9000
ITEM: NUMBER OF ATOMS
2048
ITEM: BOX BOUNDS pp pp pp
7.1778912625688207e-01 4.6482210873740115e+01
7.1778912625688207e-01 4.6482210873740115e+01
7.1778912625688207e-01 4.6482210873740115e+01
ITEM: ATOMS id type xs ys zs
8 1 0.128064 0.0616737 0.0601487
35 1 0.0656057 0.120012 0.0619676
130 1 0.0685266 0.0608412 0.125938
165 1 0.131318 0.120832 0.125956
1268 1 0.497119 0.93268 0.182308
1361 1 0.496588 0.750478 0.240454
133 1 0.126633 1.00601 0.124891
1460 1 0.500803 0.688928 0.435722
12 1 0.250297 0.0562202 0.0614448
39 1 0.192212 0.12316 0.0614049
43 1 0.310904 0.126252 0.0601463
134 1 0.189034 0.058369 0.123643
138 1 0.309645 0.0635839 0.120587
169 1 0.251645 0.126535 0.118547
137 1 0.247687 0.998671 0.125268
1505 1 -0.00176351 0.879562 0.375917
1090 1 0.0589042 0.812625 -0.00200376
16 1 0.373189 0.060716 0.0622224
47 1 0.438316 0.126239 0.0610428
142 1 0.437451 0.0601493 0.122661
173 1 0.378996 0.12285 0.11939
141 1 0.372617 1.00299 0.12081
15 1 0.439286 0.00251552 0.0601038
1124 1 0.000200366 0.942055 0.0542765
40 1 0.123702 0.18568 0.0616187
67 1 0.0646421 0.253388 0.0630245
72 1 0.12164 0.314239 0.0652134
162 1 0.0610812 0.179524 0.12545
194 1 0.0576994 0.309699 0.127795
197 1 0.123288 0.249691 0.120972
1641 1 0.243923 0.878471 0.504706
353 1 -4.25247e-05 0.36814 0.246475
42 1 0.312772 0.188421 -0.000934665
44 1 0.251675 0.196724 0.0615479
71 1 0.188218 0.252487 0.0601214
75 1 0.317639 0.248913 0.0683485
76 1 0.249202 0.315718 0.0597648
166 1 0.186483 0.190151 0.118945
170 1 0.318594 0.185733 0.127466
198 1 0.185731 0.312591 0.120281
201 1 0.248152 0.254854 0.127487
202 1 0.31761 0.313972 0.119253
340 1 0.49425 0.316618 0.314526
48 1 0.378232 0.186003 0.0651367
79 1 0.438053 0.251286 0.0629577
80 1 0.37839 0.312127 0.0577043
174 1 0.440193 0.18599 0.127182
205 1 0.380049 0.250952 0.125125
206 1 0.437785 0.319056 0.121805
1473 1 0.00366176 0.754869 0.374141
1169 1 0.501797 0.496945 0.123152
99 1 0.0652059 0.377204 0.0671047
104 1 0.130785 0.437832 0.0599418
226 1 0.0653255 0.431762 0.124595
229 1 0.130258 0.376473 0.121647
1157 1 0.125964 0.494957 0.124186
1645 1 0.372234 0.871195 0.501377
1281 1 -0.00107149 0.500479 0.250666
180 1 0.503362 0.186886 0.188477
103 1 0.186807 0.373728 0.0608981
107 1 0.317603 0.377198 0.0633315
108 1 0.252165 0.438418 0.061388
230 1 0.188993 0.439229 0.120675
233 1 0.247896 0.378781 0.126012
234 1 0.312021 0.441367 0.124453
1035 1 0.312601 0.505172 0.0603768
1236 1 0.495387 0.816815 0.186807
77 1 0.376869 0.246907 -0.0038311
111 1 0.439765 0.373956 0.0617845
112 1 0.378935 0.440822 0.0633536
237 1 0.376515 0.378723 0.125523
238 1 0.439402 0.436295 0.128258
1165 1 0.374516 0.498781 0.128345
10 1 0.313335 0.0635035 0.00239691
1518 1 0.436539 0.939934 0.372265
404 1 0.495014 0.0614877 0.441303
1027 1 0.0615267 0.494489 0.0656524
1032 1 0.122919 0.560398 0.0604642
1059 1 0.0600784 0.625129 0.0666329
1154 1 0.0623433 0.560911 0.129236
1189 1 0.125564 0.623536 0.126188
1517 1 0.373563 0.872828 0.38073
526 1 0.434098 0.0635261 0.499805
45 1 0.376622 0.127572 -0.00151715
1161 1 0.252976 0.505641 0.124012
1031 1 0.189052 0.500772 0.060167
1036 1 0.247119 0.565256 0.0624889
1063 1 0.186865 0.62311 0.0601622
1067 1 0.314961 0.628527 0.0609443
1158 1 0.183851 0.55771 0.121548
1162 1 0.315458 0.566389 0.12391
1193 1 0.24685 0.618006 0.126501
73 1 0.255723 0.254866 0.00159523
276 1 0.498132 0.0616439 0.312336
292 1 -0.000855735 0.18199 0.313154
1039 1 0.440855 0.498033 0.0604513
1040 1 0.377536 0.565027 0.0617404
1071 1 0.435508 0.62586 0.0629231
1166 1 0.434639 0.560493 0.127884
1197 1 0.373824 0.627258 0.126695
1064 1 0.12404 0.684612 0.0676066
1091 1 0.0625377 0.750335 0.0570339
1096 1 0.124176 0.812646 0.0626356
1186 1 0.0654683 0.685662 0.133279
1218 1 0.0631242 0.812612 0.128775
1221 1 0.119685 0.749536 0.126446
1538 1 0.0579054 0.562888 0.500989
1519 1 0.438729 0.881673 0.43737
1058 1 0.0632657 0.683946 -0.00315472
401 1 0.496951 1.00103 0.375212
1068 1 0.248596 0.682303 0.0628255
1095 1 0.187501 0.744823 0.0615051
1099 1 0.311162 0.748619 0.0570169
1100 1 0.251631 0.812802 0.0618844
1190 1 0.185829 0.686611 0.12615
1194 1 0.313421 0.683137 0.125319
1222 1 0.186526 0.81082 0.120182
1225 1 0.25031 0.751117 0.123451
1226 1 0.3149 0.811161 0.119609
1072 1 0.373794 0.686397 0.0575702
1103 1 0.441103 0.74609 0.0642725
1104 1 0.381761 0.808032 0.0629057
1198 1 0.438451 0.683202 0.123451
1229 1 0.37382 0.743217 0.120781
1230 1 0.433988 0.812488 0.123951
1396 1 0.495325 0.940886 0.309992
1476 1 0.00279954 0.815318 0.443657
4 1 0.00268231 0.0581827 0.0581558
1093 1 0.126199 0.745718 0.00234561
497 1 0.499164 0.378573 0.375227
522 1 0.312447 0.0605191 0.497947
1520 1 0.371576 0.93517 0.437435
3 1 0.0674949 0.994957 0.0630218
1123 1 0.059326 0.877502 0.0597483
1128 1 0.127883 0.931594 0.0623519
1250 1 0.0612731 0.937854 0.126495
1253 1 0.126191 0.876586 0.126658
1156 1 -0.000827386 0.558044 0.189483
589 1 0.370624 0.25181 0.503365
129 1 -0.00192829 1.00219 0.127046
148 1 0.502587 0.0619035 0.182207
7 1 0.184516 0.00143089 0.0600096
11 1 0.31236 0.99826 0.0663178
1127 1 0.186899 0.874887 0.0576461
1131 1 0.312698 0.878384 0.0579517
1132 1 0.248208 0.933403 0.0584845
1254 1 0.188238 0.93732 0.123193
1257 1 0.246739 0.874119 0.123142
1258 1 0.311784 0.936281 0.126723
132 1 -0.00453866 0.060552 0.186355
1135 1 0.435933 0.875127 0.0621282
1136 1 0.374582 0.937844 0.0647127
1261 1 0.379724 0.87299 0.120824
1262 1 0.433606 0.936288 0.122994
1297 1 0.498803 0.504573 0.251608
391 1 0.187404 0.999118 0.431824
557 1 0.37392 0.118721 0.498954
1153 1 0.00459273 0.494908 0.129415
136 1 0.126202 0.0553161 0.18993
163 1 0.063299 0.118718 0.187807
258 1 0.0611069 0.0584147 0.248711
264 1 0.124113 0.0614263 0.311048
291 1 0.062736 0.121139 0.31029
293 1 0.120955 0.120757 0.24694
395 1 0.31049 0.996986 0.43848
140 1 0.253893 0.0587744 0.186075
167 1 0.190556 0.121815 0.184051
171 1 0.312374 0.12346 0.188044
262 1 0.185705 0.0606897 0.243929
266 1 0.318322 0.0643545 0.246972
268 1 0.254636 0.0607924 0.303566
295 1 0.188235 0.120335 0.310605
297 1 0.244366 0.126288 0.245577
299 1 0.307617 0.125499 0.305123
267 1 0.309122 1.00017 0.313908
139 1 0.314734 0.991987 0.190697
263 1 0.185965 0.00462774 0.30789
1514 1 0.316488 0.935734 0.374018
144 1 0.370596 0.0619032 0.18186
175 1 0.441004 0.121214 0.183745
270 1 0.43291 0.0621934 0.246724
272 1 0.376497 0.0602291 0.312762
301 1 0.375449 0.130513 0.251919
303 1 0.438854 0.131573 0.312911
269 1 0.374654 -0.00598033 0.252123
143 1 0.437153 0.997103 0.182541
271 1 0.436365 -0.000629748 0.30798
1510 1 0.182019 0.943842 0.372819
617 1 0.250249 0.372187 0.497785
168 1 0.122791 0.187429 0.183995
195 1 0.0644383 0.248276 0.185065
200 1 0.122152 0.310914 0.1798
290 1 0.0639428 0.185573 0.250313
296 1 0.126817 0.17894 0.306485
322 1 0.0559255 0.305244 0.245609
323 1 0.0654555 0.245803 0.306491
325 1 0.127957 0.249944 0.246626
328 1 0.116928 0.309366 0.308294
172 1 0.2443 0.185926 0.182907
199 1 0.183147 0.251222 0.183258
203 1 0.312839 0.255755 0.183768
204 1 0.25383 0.318898 0.186369
294 1 0.185169 0.186456 0.242955
298 1 0.315348 0.194168 0.246951
300 1 0.246326 0.189627 0.307702
326 1 0.183193 0.316005 0.244495
327 1 0.1863 0.247557 0.312364
329 1 0.244554 0.251722 0.241651
330 1 0.313695 0.316519 0.247436
331 1 0.309809 0.253944 0.304844
332 1 0.246241 0.313833 0.307104
176 1 0.380015 0.190715 0.189758
207 1 0.437748 0.257997 0.187061
208 1 0.371477 0.317926 0.184364
302 1 0.437641 0.188309 0.24775
304 1 0.376547 0.193029 0.314082
333 1 0.377482 0.252986 0.249123
334 1 0.439004 0.317249 0.25003
335 1 0.437498 0.252285 0.315218
336 1 0.372407 0.317601 0.307845
1516 1 0.248167 0.936888 0.429475
1511 1 0.185505 0.882182 0.435153
1515 1 0.308149 0.873553 0.444158
1513 1 0.247877 0.874089 0.374604
227 1 0.0625331 0.369948 0.189446
232 1 0.121308 0.433538 0.188408
354 1 0.0612573 0.43898 0.247824
355 1 0.057687 0.37272 0.315501
357 1 0.121019 0.372975 0.250506
360 1 0.129054 0.43409 0.310108
1285 1 0.123928 0.499269 0.254047
585 1 0.252917 0.25314 0.499642
231 1 0.183807 0.370497 0.182229
235 1 0.309773 0.378042 0.183419
236 1 0.251048 0.444848 0.185051
358 1 0.186144 0.428092 0.24612
359 1 0.185041 0.372116 0.315725
361 1 0.251042 0.378036 0.24217
362 1 0.308691 0.43621 0.248735
363 1 0.311324 0.377282 0.305445
364 1 0.250553 0.434796 0.309524
1163 1 0.31277 0.506061 0.193779
1289 1 0.246192 0.497117 0.251662
1291 1 0.307341 0.502717 0.305753
17 1 0.503331 1.00092 -5.03253e-05
239 1 0.440343 0.374374 0.18613
240 1 0.376163 0.439988 0.186013
365 1 0.36885 0.378075 0.24027
366 1 0.434448 0.436328 0.247742
367 1 0.432639 0.375712 0.308075
368 1 0.373493 0.438174 0.306483
1167 1 0.436377 0.50294 0.186794
1295 1 0.434196 0.501743 0.316836
1293 1 0.378371 0.506905 0.253384
513 1 -0.00189259 0.00212709 0.500858
100 1 0.00119801 0.433537 0.0591184
9 1 0.256534 -0.00239472 -0.000735999
1283 1 0.0554452 0.499757 0.314319
1155 1 0.0640344 0.496992 0.188687
1160 1 0.120713 0.561963 0.189574
1187 1 0.0555345 0.624932 0.193093
1282 1 0.0617456 0.562975 0.252934
1288 1 0.121432 0.561162 0.31374
1315 1 0.0621049 0.631199 0.307189
1317 1 0.122939 0.62413 0.252207
1482 1 0.313295 0.812849 0.381979
1287 1 0.187668 0.494741 0.312467
1159 1 0.184026 0.49741 0.193531
1164 1 0.246418 0.561055 0.18888
1191 1 0.183611 0.625925 0.189048
1195 1 0.31032 0.627134 0.187905
1286 1 0.18642 0.561543 0.251511
1290 1 0.31083 0.565743 0.251574
1292 1 0.242998 0.561689 0.306627
1319 1 0.183334 0.621946 0.314476
1321 1 0.24408 0.625062 0.248175
1323 1 0.305154 0.622023 0.312297
389 1 0.119706 0.00123357 0.373769
1168 1 0.372108 0.562352 0.18918
1199 1 0.435448 0.627945 0.185547
1294 1 0.437751 0.569058 0.250806
1296 1 0.375338 0.570543 0.31563
1325 1 0.37646 0.626183 0.249524
1327 1 0.439703 0.625323 0.312478
578 1 0.0636694 0.313191 0.496664
1506 1 0.0637973 0.938676 0.371425
1512 1 0.121737 0.941106 0.440084
1509 1 0.127189 0.878438 0.372776
1192 1 0.123046 0.688033 0.187845
1219 1 0.0613539 0.752188 0.190863
1224 1 0.127652 0.812703 0.186383
1314 1 0.0591187 0.693024 0.251135
1320 1 0.125504 0.687539 0.315291
1346 1 0.0624753 0.814741 0.251216
1347 1 0.0631136 0.753759 0.315603
1349 1 0.12229 0.751867 0.25143
1352 1 0.126677 0.81769 0.310533
1196 1 0.24688 0.688359 0.18463
1223 1 0.182295 0.752967 0.181997
1227 1 0.30992 0.748346 0.186609
1228 1 0.248295 0.816616 0.184981
1318 1 0.183483 0.685925 0.246744
1322 1 0.311265 0.683378 0.251826
1324 1 0.250927 0.687423 0.312015
1350 1 0.186261 0.81377 0.242094
1351 1 0.185594 0.753797 0.309324
1353 1 0.245452 0.752787 0.248648
1354 1 0.309983 0.810458 0.251481
1355 1 0.309267 0.750401 0.31391
1356 1 0.250346 0.812691 0.312997
1200 1 0.371615 0.686802 0.189665
1231 1 0.438037 0.745254 0.184269
1232 1 0.372234 0.808338 0.186221
1326 1 0.439108 0.686839 0.248975
1328 1 0.37254 0.685192 0.312443
1357 1 0.374662 0.74914 0.252853
1358 1 0.436601 0.816136 0.252206
1359 1 0.434355 0.752433 0.307619
1360 1 0.37066 0.815408 0.311074
1507 1 0.0585493 0.884574 0.436326
261 1 0.126547 0.998641 0.249606
259 1 0.0594185 1.0002 0.312242
131 1 0.0649599 -0.000253709 0.189757
1251 1 0.0668458 0.875254 0.187111
1256 1 0.125963 0.937016 0.183067
1378 1 0.0623391 0.939438 0.249718
1379 1 0.0599284 0.876582 0.313747
1381 1 0.126176 0.8777 0.251222
1384 1 0.128308 0.938884 0.309896
1581 1 0.374494 0.624682 0.501411
1479 1 0.18626 0.751787 0.439931
135 1 0.189054 0.997703 0.189372
265 1 0.251625 0.998369 0.248092
1255 1 0.187301 0.87938 0.18865
1259 1 0.316432 0.873808 0.187525
1260 1 0.251329 0.938428 0.186274
1382 1 0.187403 0.935852 0.250142
1383 1 0.188878 0.879769 0.312023
1385 1 0.250392 0.873387 0.249988
1386 1 0.31777 0.937031 0.254483
1387 1 0.307981 0.877218 0.315473
1388 1 0.246757 0.943671 0.315454
1450 1 0.312723 0.682158 0.376817
1452 1 0.253781 0.683369 0.436627
1263 1 0.433699 0.873106 0.188809
1264 1 0.374185 0.938183 0.185338
1389 1 0.371537 0.871141 0.247088
1390 1 0.436308 0.939597 0.249144
1391 1 0.434023 0.874305 0.311302
1392 1 0.375665 0.933085 0.310433
387 1 0.0616342 -0.00413245 0.436958
386 1 0.061482 0.0645407 0.377282
392 1 0.125507 0.062929 0.433097
419 1 0.0623716 0.124048 0.440118
421 1 0.121817 0.123526 0.375256
1446 1 0.187947 0.694219 0.378533
49 1 0.503626 0.122465 -0.00107789
393 1 0.253311 0.00025499 0.375897
390 1 0.187879 0.0606267 0.372048
394 1 0.308753 0.0645111 0.371937
396 1 0.24491 0.0633844 0.431491
423 1 0.184229 0.128727 0.43889
425 1 0.247658 0.128347 0.370059
427 1 0.309858 0.124829 0.428876
1249 1 0.00151478 0.877134 0.125833
1345 1 -0.00295084 0.756632 0.25095
397 1 0.372053 0.996971 0.368989
1481 1 0.255394 0.752909 0.378186
399 1 0.433472 0.998522 0.431204
398 1 0.437591 0.0632107 0.374811
400 1 0.372645 0.0628344 0.433534
429 1 0.377469 0.123785 0.368173
431 1 0.438543 0.122898 0.436122
484 1 0.00409659 0.440692 0.436626
1441 1 -0.0068194 0.627821 0.372784
1265 1 0.494599 0.874101 0.122291
1442 1 0.0664961 0.688396 0.376874
1480 1 0.123934 0.81997 0.440747
418 1 0.0637774 0.180642 0.375111
424 1 0.123416 0.188513 0.440106
450 1 0.0544253 0.305643 0.370747
451 1 0.0601029 0.24719 0.438875
453 1 0.123568 0.242505 0.375501
456 1 0.126368 0.308182 0.43273
1425 1 0.497515 0.501303 0.373066
422 1 0.187695 0.184892 0.37189
426 1 0.314613 0.189756 0.368818
428 1 0.251995 0.191165 0.437135
454 1 0.190124 0.311263 0.378359
455 1 0.192624 0.250081 0.437269
457 1 0.253342 0.248993 0.372108
458 1 0.306355 0.310842 0.365423
459 1 0.316976 0.254395 0.433986
460 1 0.250403 0.313012 0.436974
385 1 0.000979362 0.00155035 0.380446
625 1 0.503331 0.38075 0.493764
1457 1 0.500379 0.6232 0.376257
1486 1 0.441174 0.811765 0.372593
430 1 0.436814 0.190487 0.375901
432 1 0.369854 0.185121 0.436502
461 1 0.379987 0.252477 0.373166
462 1 0.435111 0.314155 0.37515
463 1 0.435808 0.248453 0.438508
464 1 0.372998 0.322606 0.430709
1485 1 0.370153 0.750663 0.376384
613 1 0.122453 0.370397 0.499864
1487 1 0.436175 0.751309 0.436427
483 1 0.0670427 0.374222 0.431024
488 1 0.124863 0.44224 0.439126
485 1 0.127392 0.372986 0.373951
482 1 0.0674446 0.440693 0.373924
1411 1 0.064305 0.508125 0.439929
1456 1 0.378168 0.684986 0.436154
492 1 0.25016 0.439669 0.439326
490 1 0.312402 0.438412 0.369941
489 1 0.251863 0.376475 0.372708
491 1 0.307602 0.378521 0.430293
486 1 0.189511 0.437104 0.37443
487 1 0.187667 0.374361 0.437045
1415 1 0.188102 0.497499 0.435269
13 1 0.374241 -0.00255023 -0.000359928
1454 1 0.435673 0.690608 0.368103
494 1 0.436172 0.433352 0.375688
496 1 0.366255 0.438591 0.440752
493 1 0.370309 0.378001 0.365833
495 1 0.438949 0.373918 0.440165
1423 1 0.43322 0.498099 0.438377
1421 1 0.37631 0.500472 0.379461
1448 1 0.124443 0.69231 0.439071
388 1 -0.00533016 0.0629443 0.435993
1488 1 0.380978 0.814464 0.438349
1541 1 0.126678 0.502835 0.500573
1478 1 0.190813 0.808273 0.370274
1413 1 0.123012 0.502973 0.371588
1410 1 0.0607055 0.562531 0.37427
1416 1 0.126555 0.564325 0.440526
1443 1 0.0551028 0.628838 0.434272
1445 1 0.124618 0.628114 0.379744
1417 1 0.244448 0.497896 0.377885
1419 1 0.308974 0.500663 0.437795
1420 1 0.252069 0.559832 0.436869
1451 1 0.312574 0.618023 0.437868
1449 1 0.247743 0.620668 0.369153
1447 1 0.188116 0.626933 0.434837
1414 1 0.18501 0.561148 0.374979
1418 1 0.312203 0.560582 0.375451
1474 1 0.0650418 0.810559 0.375563
1329 1 0.50307 0.626445 0.249104
1484 1 0.240829 0.818129 0.439789
1453 1 0.372352 0.62291 0.378206
1424 1 0.376493 0.558007 0.439394
1455 1 0.43975 0.622962 0.443268
1422 1 0.438703 0.564425 0.373907
1475 1 0.0625113 0.752007 0.436975
1483 1 0.313634 0.751432 0.440339
1477 1 0.12918 0.754293 0.375045
1428 1 0.501697 0.563966 0.439711
1377 1 0.00204575 0.88003 0.253538
581 1 0.124903 0.250088 0.500275
1508 1 -0.00273591 0.939249 0.439701
46 1 0.441079 0.191083 0.0010372
161 1 0.000474957 0.121225 0.123844
97 1 -0.000383631 0.375334 -0.00143242
1140 1 0.498168 0.936939 0.0639151
1348 1 0.000430207 0.820489 0.314469
554 1 0.308173 0.187619 0.503906
590 1 0.434946 0.315694 0.50067
1060 1 0.000283365 0.687329 0.0698193
558 1 0.434819 0.18843 0.498072
1613 1 0.375005 0.747603 0.49975
372 1 0.501764 0.436281 0.31196
481 1 -0.00368022 0.377331 0.376243
1252 1 -0.0027364 0.941933 0.190192
433 1 0.497631 0.128759 0.37169
244 1 0.493577 0.43504 0.191685
1057 1 0.00204069 0.620631 0.00297113
1130 1 0.315573 0.937904 -0.00502408
1649 1 0.498916 0.874683 0.501422
228 1 -0.00200904 0.434553 0.186008
1028 1 0.00315237 0.558347 0.0668672
1393 1 0.500582 0.879586 0.253306
1092 1 -0.00146861 0.81369 0.066918
1605 1 0.123715 0.753241 0.499682
621 1 0.373023 0.378521 0.499891
1606 1 0.182793 0.817679 0.503344
582 1 0.184832 0.310355 0.497341
1129 1 0.250855 0.876582 -0.00081003
622 1 0.436628 0.435307 0.494339
1570 1 0.0557331 0.687147 0.496177
525 1 0.375579 0.000476324 0.49662
1573 1 0.126049 0.62329 0.498433
106 1 0.318575 0.432571 0.00458534
1569 1 -0.00179041 0.626512 0.503961
1574 1 0.187604 0.688748 0.496477
553 1 0.248271 0.119414 0.49558
1638 1 0.181406 0.939375 0.495799
521 1 0.248845 0.997398 0.497308
1134 1 0.436521 0.934922 0.00318348
514 1 0.0639809 0.0624754 0.493387
1033 1 0.251168 0.499266 0.000795641
81 1 0.506075 0.253019 0.00445748
1097 1 0.249724 0.746816 -0.00142485
518 1 0.183962 0.0664155 0.498152
614 1 0.182307 0.431586 0.498387
41 1 0.255728 0.126562 0.000835616
110 1 0.444605 0.432251 -0.00183524
1061 1 0.123987 0.62438 0.0031953
520 1 0.1252 0.0614843 0.555557
547 1 0.0620314 0.117144 0.557257
642 1 0.0695546 0.0614315 0.62409
677 1 0.124684 0.123272 0.622116
1700 1 0.00101246 0.686352 0.689902
2032 1 0.382164 0.938479 0.932941
2030 1 0.438879 0.93587 0.870083
772 1 0.000769592 0.0639141 0.80914
524 1 0.249243 0.0578783 0.564704
551 1 0.186384 0.121689 0.562887
555 1 0.309132 0.119411 0.56206
646 1 0.185935 0.0632394 0.628386
650 1 0.316229 0.0577595 0.627406
681 1 0.248625 0.11828 0.624511
523 1 0.31149 0.999273 0.561799
516 1 0.00543394 0.0619777 0.558973
649 1 0.256109 0.998104 0.622261
528 1 0.370896 0.0623663 0.558815
559 1 0.439948 0.120926 0.564551
654 1 0.437496 0.060425 0.626761
685 1 0.37294 0.121981 0.62437
653 1 0.373905 0.996316 0.623087
552 1 0.128424 0.188985 0.558243
579 1 0.0658063 0.254733 0.562862
584 1 0.123058 0.315951 0.563262
674 1 0.069042 0.184711 0.617286
706 1 0.0632727 0.309538 0.627352
709 1 0.126787 0.247377 0.623286
2029 1 0.373736 0.871884 0.878657
1668 1 -0.00322596 0.556349 0.687148
556 1 0.248287 0.182616 0.565385
583 1 0.191475 0.250634 0.562061
587 1 0.312262 0.254455 0.563499
588 1 0.25086 0.319155 0.559572
678 1 0.182991 0.185403 0.626886
682 1 0.310511 0.190577 0.624665
710 1 0.183807 0.310926 0.629449
713 1 0.246675 0.252406 0.626801
714 1 0.30826 0.312735 0.626267
721 1 0.499692 0.251525 0.627053
1602 1 0.0634611 0.815875 0.501439
560 1 0.374148 0.182946 0.561241
591 1 0.438979 0.251241 0.564756
592 1 0.37642 0.314363 0.564962
686 1 0.43576 0.189289 0.628774
717 1 0.377889 0.248524 0.625526
718 1 0.438718 0.311342 0.625345
2031 1 0.437497 0.873803 0.934551
1972 1 0.5019 0.691596 0.935257
1542 1 0.190403 0.560185 0.501972
1122 1 0.0643881 0.938714 0.995185
1577 1 0.249501 0.622202 0.493308
611 1 0.0647631 0.381704 0.563061
616 1 0.124809 0.44203 0.564227
738 1 0.067576 0.435805 0.624297
741 1 0.120532 0.369286 0.626147
1539 1 0.0614148 0.496667 0.5546
785 1 0.496098 0.00729805 0.755923
615 1 0.183843 0.369891 0.562052
619 1 0.31387 0.375183 0.560677
620 1 0.246567 0.434126 0.561779
742 1 0.187952 0.435324 0.62375
745 1 0.250269 0.375017 0.62337
746 1 0.307137 0.437961 0.625603
1547 1 0.310666 0.493485 0.564088
1637 1 0.120413 0.877746 0.498637
1841 1 0.500554 0.617498 0.74947
1034 1 0.315394 0.565924 1.00062
586 1 0.313461 0.314567 0.500037
623 1 0.439689 0.3762 0.554473
624 1 0.378293 0.433515 0.562361
749 1 0.376815 0.37412 0.625711
750 1 0.441483 0.433155 0.623068
1551 1 0.440196 0.497052 0.562824
1545 1 0.247598 0.500915 0.503279
1677 1 0.377979 0.496442 0.618921
5 1 0.125092 -0.00122863 0.999796
2001 1 0.500095 0.751921 0.868954
2028 1 0.253593 0.938916 0.939473
1620 1 0.498964 0.808977 0.564461
2033 1 0.49929 0.87612 0.874744
1669 1 0.126548 0.49681 0.625604
1544 1 0.121288 0.558284 0.565014
1571 1 0.0649863 0.622401 0.559259
1666 1 0.066144 0.566567 0.626786
1701 1 0.127124 0.63037 0.621908
529 1 0.496955 0.99654 0.500628
1921 1 0.00747398 0.495076 0.87411
2022 1 0.191032 0.94071 0.881071
2023 1 0.185459 0.8737 0.931692
1543 1 0.18956 0.496925 0.564153
1673 1 0.246998 0.496656 0.625166
1548 1 0.246164 0.563319 0.567943
1575 1 0.189166 0.627398 0.555903
1579 1 0.311763 0.625529 0.56077
1670 1 0.183615 0.564274 0.625936
1674 1 0.313972 0.562675 0.62347
1705 1 0.253001 0.631474 0.623704
913 1 0.502791 -0.000818464 0.873867
1101 1 0.372137 0.745685 0.998709
2026 1 0.315568 0.938672 0.876853
1552 1 0.373497 0.56169 0.563755
1583 1 0.442366 0.622568 0.557136
1678 1 0.440243 0.558172 0.628136
1709 1 0.38304 0.619133 0.62553
69 1 0.125565 0.250724 0.999599
1956 1 -0.000383628 0.682693 0.942352
1576 1 0.121276 0.686595 0.554888
1603 1 0.0612514 0.75145 0.56451
1608 1 0.119952 0.815358 0.562637
1698 1 0.0618077 0.685391 0.621662
1730 1 0.0621077 0.813463 0.627909
1733 1 0.12351 0.746574 0.624574
2025 1 0.254117 0.872117 0.881552
1889 1 0.00289885 0.8733 0.749938
1580 1 0.246303 0.686729 0.560345
1607 1 0.182497 0.749602 0.561968
1611 1 0.312015 0.747644 0.561952
1612 1 0.249163 0.811222 0.561529
1702 1 0.18781 0.684923 0.62066
1706 1 0.315301 0.684025 0.619984
1734 1 0.191343 0.812623 0.623524
1737 1 0.250353 0.748512 0.619576
1738 1 0.313558 0.810855 0.625193
38 1 0.188321 0.181934 0.998308
1828 1 -0.000513273 0.685702 0.819317
618 1 0.306673 0.436612 0.497459
2036 1 0.495081 0.938197 0.934182
2027 1 0.314586 0.873128 0.941012
903 1 0.190126 0.00398837 0.936317
1584 1 0.376808 0.688728 0.564426
1615 1 0.435037 0.755162 0.5658
1616 1 0.371832 0.81082 0.564382
1710 1 0.439745 0.682303 0.631958
1741 1 0.376171 0.745633 0.628114
1742 1 0.432461 0.812463 0.625196
1745 1 0.496975 0.752634 0.625064
549 1 0.123384 0.12405 0.497774
515 1 0.0658499 0.00152715 0.558977
645 1 0.126165 -0.00134032 0.624454
1635 1 0.0614402 0.873821 0.56618
1640 1 0.123325 0.933014 0.564167
1762 1 0.0631879 0.936491 0.62538
1765 1 0.127916 0.879116 0.625718
899 1 0.0641856 0.00329544 0.932782
1844 1 0.496915 0.687471 0.812969
78 1 0.439879 0.31461 0.997363
519 1 0.179943 0.996041 0.56107
1639 1 0.19275 0.875621 0.56564
1643 1 0.312943 0.872063 0.565332
1644 1 0.251443 0.939321 0.563903
1766 1 0.189092 0.94488 0.63143
1769 1 0.25143 0.871309 0.628565
1770 1 0.311967 0.933079 0.622045
1609 1 0.247552 0.747915 0.497445
2021 1 0.120144 0.88256 0.874696
527 1 0.434637 1.00254 0.560988
1647 1 0.435411 0.871619 0.561278
1648 1 0.371687 0.933975 0.55749
1773 1 0.372113 0.869834 0.626625
1774 1 0.437131 0.937785 0.622254
648 1 0.128144 0.060344 0.689507
675 1 0.0658383 0.123003 0.687604
770 1 0.0649177 0.0584914 0.749177
776 1 0.129528 0.0639422 0.814545
803 1 0.0627353 0.123328 0.813568
805 1 0.124035 0.122701 0.756365
2004 1 0.500024 0.806016 0.935548
652 1 0.252486 0.0627011 0.693495
679 1 0.189165 0.124869 0.687546
683 1 0.311096 0.12053 0.685537
774 1 0.185466 0.0635735 0.751981
778 1 0.318642 0.0637514 0.747924
780 1 0.251565 0.0682328 0.804002
807 1 0.187824 0.127145 0.818492
809 1 0.256308 0.132971 0.75025
811 1 0.313425 0.128423 0.812978
651 1 0.311877 1.00376 0.689711
775 1 0.188251 1.00088 0.812737
656 1 0.374677 0.0598313 0.684786
687 1 0.43767 0.129643 0.685866
782 1 0.434758 0.0684076 0.748523
784 1 0.374975 0.0638139 0.809596
813 1 0.379181 0.129412 0.749224
815 1 0.437811 0.131277 0.813431
655 1 0.438101 0.999534 0.683654
781 1 0.378789 0.000257986 0.753879
2024 1 0.122624 0.94071 0.936278
2018 1 0.0634879 0.941297 0.874987
2019 1 0.0635621 0.876498 0.934276
680 1 0.125075 0.183995 0.68791
707 1 0.0617235 0.245558 0.68734
712 1 0.124222 0.30845 0.687831
802 1 0.058063 0.187908 0.754036
808 1 0.120604 0.185896 0.815781
834 1 0.0655173 0.310641 0.751144
835 1 0.0638357 0.250911 0.812259
837 1 0.124285 0.245025 0.751237
840 1 0.128938 0.310005 0.811305
74 1 0.314128 0.31754 0.998792
684 1 0.24331 0.185909 0.688476
711 1 0.177855 0.247423 0.691821
715 1 0.314849 0.251358 0.685957
716 1 0.248559 0.314482 0.688561
806 1 0.182076 0.18509 0.756064
810 1 0.314632 0.194219 0.751943
812 1 0.251294 0.19536 0.808977
838 1 0.193888 0.310782 0.75041
839 1 0.186394 0.250827 0.813107
841 1 0.250045 0.24792 0.746821
842 1 0.310415 0.318716 0.749517
843 1 0.310144 0.253408 0.814508
844 1 0.25293 0.312187 0.81184
688 1 0.371408 0.185408 0.686972
719 1 0.441456 0.252327 0.687947
720 1 0.377403 0.312806 0.687223
814 1 0.437544 0.191535 0.749737
816 1 0.379531 0.191721 0.809123
845 1 0.373657 0.257614 0.746037
846 1 0.437825 0.316792 0.748212
847 1 0.440331 0.250742 0.8137
848 1 0.380056 0.309283 0.812902
901 1 0.128005 1.002 0.87877
689 1 0.502466 0.127875 0.629971
739 1 0.0590572 0.372999 0.685292
744 1 0.125998 0.435789 0.686343
866 1 0.0630369 0.435272 0.744382
867 1 0.0661926 0.370599 0.816054
869 1 0.126378 0.375738 0.749024
872 1 0.125712 0.435658 0.816784
1795 1 0.0673327 0.493409 0.813619
1667 1 0.0658716 0.497613 0.686039
1797 1 0.125086 0.501089 0.752967
788 1 0.494573 0.0655753 0.815541
743 1 0.185363 0.369822 0.685931
747 1 0.311593 0.373152 0.685958
748 1 0.248053 0.438257 0.6876
870 1 0.186172 0.440431 0.752426
871 1 0.190672 0.374886 0.811558
873 1 0.246638 0.377787 0.744758
874 1 0.311009 0.433426 0.747932
875 1 0.313853 0.372448 0.812982
876 1 0.249846 0.441052 0.814486
1671 1 0.18509 0.49641 0.690037
1968 1 0.376138 0.689756 0.932822
751 1 0.435219 0.37501 0.685503
752 1 0.373261 0.433558 0.682414
877 1 0.376586 0.378119 0.750996
878 1 0.442056 0.440662 0.749711
879 1 0.434425 0.378142 0.812494
880 1 0.37375 0.436949 0.812714
1679 1 0.441082 0.497717 0.683315
1805 1 0.380133 0.49615 0.742611
1966 1 0.439915 0.683673 0.875231
1672 1 0.129863 0.556937 0.687069
1699 1 0.0679603 0.626171 0.684144
1794 1 0.0608389 0.559489 0.747781
1800 1 0.124687 0.565789 0.815442
1827 1 0.0611083 0.626375 0.814135
1829 1 0.124351 0.622965 0.75111
1803 1 0.313956 0.499045 0.805342
1675 1 0.315422 0.499847 0.679389
1801 1 0.24954 0.498486 0.745632
1799 1 0.183453 0.50165 0.814458
1676 1 0.24704 0.560233 0.682967
1703 1 0.182789 0.624701 0.683993
1707 1 0.310272 0.621649 0.680478
1798 1 0.192547 0.561008 0.747299
1802 1 0.313432 0.559943 0.736448
1804 1 0.255885 0.559547 0.804673
1831 1 0.188803 0.620465 0.812385
1833 1 0.252584 0.621464 0.744144
1835 1 0.31339 0.623473 0.808585
1807 1 0.444045 0.496419 0.812799
1680 1 0.375743 0.557727 0.687213
1711 1 0.434572 0.618982 0.692617
1806 1 0.438702 0.556047 0.752676
1808 1 0.374728 0.558782 0.811723
1837 1 0.369941 0.623853 0.749582
1839 1 0.442174 0.625764 0.813109
1098 1 0.311401 0.810499 1.00112
2000 1 0.370608 0.807447 0.937775
1998 1 0.439687 0.810809 0.873357
1997 1 0.372108 0.74922 0.873494
1704 1 0.124216 0.688792 0.689716
1731 1 0.0610832 0.74718 0.68525
1736 1 0.127672 0.816897 0.687367
1826 1 0.0660012 0.686859 0.748803
1832 1 0.120691 0.686161 0.812593
1858 1 0.06687 0.815638 0.749184
1859 1 0.060878 0.755193 0.811528
1861 1 0.123573 0.750358 0.751073
1864 1 0.125723 0.814094 0.815879
1708 1 0.249107 0.687188 0.689129
1735 1 0.18336 0.75312 0.687098
1739 1 0.308805 0.749167 0.684725
1740 1 0.25103 0.812255 0.688481
1830 1 0.183798 0.679317 0.749402
1834 1 0.311282 0.683835 0.744492
1836 1 0.246313 0.685096 0.807236
1862 1 0.185159 0.815209 0.751692
1863 1 0.184277 0.746929 0.812556
1865 1 0.256086 0.753628 0.753478
1866 1 0.315212 0.812857 0.749824
1867 1 0.316363 0.754449 0.818707
1868 1 0.24399 0.812049 0.812279
1999 1 0.438136 0.749902 0.936928
1712 1 0.376723 0.678836 0.686753
1743 1 0.437488 0.752592 0.686741
1744 1 0.377647 0.807766 0.690475
1838 1 0.443063 0.683938 0.748302
1840 1 0.378601 0.687034 0.810061
1869 1 0.374412 0.748409 0.749611
1870 1 0.43531 0.810878 0.751591
1871 1 0.436487 0.749059 0.813649
1872 1 0.374771 0.816162 0.811894
1094 1 0.184275 0.813751 1.00006
1037 1 0.377651 0.500212 1.0059
643 1 0.0627477 0.999088 0.684968
771 1 0.0622834 0.00176828 0.81241
773 1 0.128141 1.00016 0.749953
1763 1 0.064928 0.876625 0.689129
1768 1 0.128783 0.940735 0.688995
1890 1 0.0643719 0.94109 0.748633
1891 1 0.0636711 0.875622 0.81075
1893 1 0.124011 0.875721 0.751191
1896 1 0.122063 0.940665 0.812498
1924 1 -9.15941e-05 0.558299 0.938115
777 1 0.253906 1.00259 0.751776
779 1 0.315163 -4.91504e-05 0.81568
647 1 0.192497 -0.0018102 0.690699
1767 1 0.186667 0.874582 0.686758
1771 1 0.315797 0.872878 0.683573
1772 1 0.255723 0.937383 0.691268
1894 1 0.186952 0.93287 0.753131
1895 1 0.188184 0.878345 0.822254
1897 1 0.248833 0.874148 0.751524
1898 1 0.309811 0.934713 0.757969
1899 1 0.311599 0.871811 0.814511
1900 1 0.252106 0.93861 0.816369
783 1 0.439167 0.00265187 0.812783
1775 1 0.439821 0.87562 0.684281
1776 1 0.375032 0.936154 0.688809
1901 1 0.377669 0.876218 0.753351
1902 1 0.443638 0.936719 0.745094
1903 1 0.442777 0.875933 0.810228
1904 1 0.375997 0.935555 0.811775
1652 1 0.496284 0.942102 0.567092
641 1 0.00583405 -0.000729188 0.626502
898 1 0.0656409 0.0613879 0.872653
904 1 0.128965 0.0615661 0.936436
931 1 0.0646859 0.125792 0.939081
933 1 0.123158 0.1227 0.877064
66 1 0.0667541 0.318446 1.00308
1553 1 0.496333 0.497818 0.495367
905 1 0.252212 -0.000515712 0.873837
907 1 0.316797 0.00256747 0.937643
902 1 0.192867 0.0579331 0.87141
906 1 0.30993 0.0578507 0.873908
908 1 0.255848 0.0661966 0.941086
935 1 0.188536 0.126679 0.934543
937 1 0.255764 0.12635 0.869793
939 1 0.313528 0.128651 0.935081
817 1 0.49848 0.122159 0.747947
1954 1 0.0628528 0.686256 0.880316
911 1 0.44012 -0.00062172 0.936532
1992 1 0.128489 0.803033 0.940007
909 1 0.377323 -0.000555195 0.880394
910 1 0.435144 0.0662903 0.872931
912 1 0.374392 0.0644014 0.932288
941 1 0.373501 0.120075 0.868099
943 1 0.43519 0.121372 0.937937
1989 1 0.122644 0.746169 0.872713
1987 1 0.0566181 0.751223 0.936875
993 1 0.002283 0.375032 0.873466
1825 1 0.00831613 0.621926 0.74935
930 1 0.057479 0.186685 0.875131
936 1 0.124741 0.197046 0.937448
962 1 0.0625304 0.309159 0.87567
963 1 0.0634229 0.253453 0.939809
965 1 0.122379 0.252331 0.875638
968 1 0.125329 0.312801 0.938262
1962 1 0.313662 0.683625 0.869927
596 1 0.50317 0.310605 0.557573
1777 1 0.500341 0.874694 0.619993
1990 1 0.190841 0.803354 0.879944
545 1 0.000210539 0.125893 0.500407
934 1 0.182126 0.191902 0.875103
938 1 0.31078 0.191278 0.87352
940 1 0.252625 0.193488 0.93802
966 1 0.186174 0.310545 0.876022
967 1 0.186962 0.248848 0.942272
969 1 0.248885 0.253433 0.872793
970 1 0.311417 0.310923 0.876214
971 1 0.318257 0.253325 0.933843
972 1 0.249684 0.313198 0.933435
628 1 0.499814 0.43959 0.563793
1986 1 0.0679391 0.817274 0.881029
644 1 -0.00413542 0.059809 0.685675
1764 1 -0.00239757 0.935258 0.689887
564 1 0.496008 0.188126 0.560233
973 1 0.37734 0.246656 0.872191
944 1 0.379304 0.187944 0.939822
942 1 0.437963 0.189432 0.876325
976 1 0.377693 0.317216 0.939963
975 1 0.445605 0.251 0.935826
974 1 0.43981 0.315061 0.876877
1933 1 0.376676 0.493609 0.872272
1964 1 0.251253 0.684854 0.930565
995 1 0.065405 0.372423 0.933533
1000 1 0.131195 0.440757 0.937235
994 1 0.0623419 0.437682 0.876508
997 1 0.129821 0.376075 0.877737
1070 1 0.439844 0.685526 1.00024
1996 1 0.244597 0.810608 0.942471
998 1 0.188776 0.438275 0.875176
1001 1 0.247382 0.373357 0.869666
999 1 0.190468 0.374702 0.932058
1004 1 0.251272 0.438337 0.932547
1002 1 0.313205 0.432893 0.875032
1003 1 0.311337 0.377111 0.938028
1927 1 0.195429 0.499868 0.934253
1960 1 0.126727 0.691984 0.939132
1 1 0.00436475 -0.00241054 0.995194
1935 1 0.442247 0.498514 0.940102
1934 1 0.441526 0.557328 0.87553
1993 1 0.252614 0.743656 0.872804
1965 1 0.37562 0.620681 0.877935
1991 1 0.191553 0.745422 0.93741
109 1 0.380828 0.378579 0.998134
1008 1 0.38016 0.436389 0.938704
1007 1 0.441756 0.374978 0.937109
1005 1 0.374192 0.375073 0.87169
1006 1 0.439812 0.434921 0.874401
1995 1 0.311004 0.746776 0.936522
1925 1 0.1211 0.500347 0.877627
1923 1 0.0637427 0.497157 0.940242
1922 1 0.0625044 0.559499 0.876071
1928 1 0.123482 0.558739 0.940673
1955 1 0.0644578 0.617384 0.941844
1957 1 0.127067 0.627237 0.871958
1994 1 0.306606 0.811408 0.883031
1963 1 0.316981 0.62152 0.934884
1961 1 0.254675 0.622994 0.869286
1932 1 0.257294 0.56127 0.934142
1931 1 0.315863 0.493316 0.93957
1959 1 0.185295 0.627024 0.93562
1929 1 0.248636 0.498787 0.871531
1926 1 0.187637 0.563845 0.882306
1930 1 0.312421 0.554027 0.870825
1967 1 0.443323 0.624893 0.932696
1936 1 0.370298 0.558216 0.938878
1958 1 0.186658 0.683648 0.874892
1713 1 0.49757 0.616914 0.624328
1892 1 -0.00184886 0.938747 0.809186
532 1 0.500031 0.0575155 0.558523
945 1 0.500772 0.125648 0.878398
868 1 0.0040553 0.43254 0.80903
580 1 0.000708972 0.313294 0.563444
1684 1 0.501952 0.557779 0.687804
1572 1 -0.000500131 0.691335 0.562045
1860 1 0.00188093 0.816538 0.812663
1988 1 0.00372531 0.813429 0.935963
1953 1 0.00363215 0.624622 0.877699
961 1 -0.00342324 0.24952 0.878047
820 1 0.503476 0.182313 0.817508
929 1 -0.00448578 0.127354 0.876885
900 1 -0.00396885 0.0639565 0.935706
1857 1 0.0077197 0.753349 0.747233
660 1 0.493946 0.0625243 0.686755
1793 1 0.0047145 0.495927 0.746367
1646 1 0.435662 0.937587 0.497279
1062 1 0.191883 0.685639 0.998671
1133 1 0.374119 0.875547 0.996237
561 1 0.497688 0.124364 0.50119
98 1 0.0642239 0.436234 1.00015
1029 1 0.128821 0.500517 1.00361
1065 1 0.250203 0.618174 0.995509
105 1 0.249247 0.37787 0.999124
1069 1 0.379797 0.623686 0.992217
550 1 0.194661 0.186846 0.501945
1038 1 0.441785 0.563918 0.997964
1026 1 0.0671187 0.557065 1.0065
34 1 0.0665448 0.188376 1.00363
102 1 0.190836 0.439952 1.00093
70 1 0.184921 0.313895 0.996719
2 1 0.0637328 0.0622296 0.997802
1610 1 0.309213 0.812236 0.500618
610 1 0.0617435 0.438763 0.497906
1634 1 0.0625082 0.941941 0.503477
546 1 0.0644266 0.186581 0.506098
1126 1 0.186882 0.933918 0.994815
1614 1 0.441367 0.812064 0.49812
1582 1 0.4429 0.690711 0.501556
1025 1 0.00243718 0.496597 0.999772
1642 1 0.306481 0.932868 0.499309
14 1 0.437802 0.0577394 0.997652
1066 1 0.310036 0.681924 0.995108
1578 1 0.312167 0.685176 0.494693
1550 1 0.433493 0.558617 0.503146
37 1 0.127922 0.121828 0.99816
101 1 0.129734 0.375202 0.997699
1030 1 0.184218 0.563112 1.00199
577 1 -0.00212934 0.244826 0.502702
1137 1 0.496396 0.875431 0.999107
1102 1 0.440556 0.815455 0.99997
609 1 -0.00116001 0.379188 0.501298
1125 1 0.124189 0.873875 0.995099
1549 1 0.375342 0.499072 0.503484
6 1 0.19694 0.0674476 0.996059
517 1 0.125504 0.00435885 0.49665
1546 1 0.31015 0.561599 0.507038
24 1 0.626399 0.0573837 0.0620094
51 1 0.569128 0.123393 0.0632087
146 1 0.561952 0.0651107 0.127877
181 1 0.626327 0.122176 0.122112
121 1 0.747134 0.375143 -0.00464349
18 1 0.560791 0.0655138 0.00158793
19 1 0.567621 0.997209 0.0621821
28 1 0.75148 0.0641966 0.0647518
55 1 0.692542 0.120706 0.0623231
59 1 0.810665 0.126596 0.063853
150 1 0.690125 0.0594092 0.128109
154 1 0.809862 0.0581578 0.125649
185 1 0.747725 0.127995 0.131601
153 1 0.751709 0.998759 0.133326
23 1 0.687474 1.00078 0.0638551
260 1 1.00127 0.0647922 0.314466
36 1 1.00096 0.182041 0.0592992
32 1 0.872741 0.0637825 0.0606589
63 1 0.935846 0.124073 0.0650631
158 1 0.935715 0.0623437 0.124332
189 1 0.870769 0.123871 0.126185
157 1 0.874158 -0.00109471 0.119846
1492 1 0.504118 0.812252 0.436539
31 1 0.941162 0.998157 0.0582881
56 1 0.625941 0.190568 0.0675596
83 1 0.560225 0.249685 0.0709225
88 1 0.626287 0.316939 0.0644042
178 1 0.566142 0.189259 0.133365
210 1 0.564529 0.314105 0.127352
213 1 0.626413 0.253021 0.132879
1536 1 0.872164 0.939144 0.441238
449 1 0.998967 0.2439 0.371248
20 1 0.500967 0.0603355 0.0611252
1535 1 0.938574 0.873295 0.43828
60 1 0.75152 0.191677 0.067389
87 1 0.686276 0.252742 0.0679084
91 1 0.811584 0.252902 0.0645685
92 1 0.747356 0.308827 0.0662207
182 1 0.68967 0.192822 0.129438
186 1 0.808864 0.187231 0.123336
214 1 0.686486 0.319176 0.132663
217 1 0.749654 0.250111 0.132529
218 1 0.812716 0.313058 0.129589
116 1 0.498315 0.437946 0.0670291
289 1 1.0012 0.122863 0.252484
1049 1 0.750592 0.494831 -0.000278839
64 1 0.871194 0.182681 0.068798
95 1 0.936523 0.242788 0.0708239
96 1 0.87915 0.315597 0.0640727
190 1 0.940601 0.179758 0.129022
221 1 0.874443 0.247053 0.127375
222 1 0.938344 0.314722 0.124155
126 1 0.940549 0.440688 -0.00584294
337 1 0.499266 0.259721 0.255032
53 1 0.626557 0.123389 0.00283229
115 1 0.564066 0.38222 0.061024
120 1 0.627999 0.44224 0.0637311
242 1 0.56656 0.438885 0.128978
245 1 0.625999 0.374625 0.128706
1173 1 0.62977 0.502677 0.125671
119 1 0.687136 0.379375 0.063297
123 1 0.811731 0.370608 0.0596388
124 1 0.751973 0.439314 0.0664706
246 1 0.691414 0.435713 0.126975
249 1 0.753198 0.371848 0.125352
250 1 0.812756 0.431806 0.125983
1047 1 0.689587 0.500318 0.0689718
1561 1 0.750699 0.499362 0.5044
127 1 0.936279 0.376923 0.0558252
128 1 0.872876 0.436011 0.060464
253 1 0.873533 0.374079 0.127734
254 1 0.935464 0.435373 0.12306
1534 1 0.938435 0.947133 0.378054
1043 1 0.560857 0.502843 0.0616467
1048 1 0.624805 0.561807 0.0650246
1075 1 0.56369 0.620688 0.0650105
1170 1 0.561483 0.561674 0.128812
1205 1 0.632945 0.627039 0.122394
89 1 0.752108 0.248007 0.00119117
1051 1 0.815899 0.499205 0.0699382
1177 1 0.753016 0.49875 0.12346
1052 1 0.746027 0.562174 0.0632045
1079 1 0.687681 0.626921 0.0642752
1083 1 0.812019 0.625144 0.0619209
1174 1 0.688241 0.564369 0.124893
1178 1 0.812419 0.561752 0.128513
1209 1 0.7495 0.626379 0.12455
1105 1 0.500748 0.748029 -0.000474966
598 1 0.688593 0.311383 0.502429
30 1 0.941167 0.0630112 -6.47857e-05
1181 1 0.882246 0.499666 0.123565
1055 1 0.939488 0.494432 0.0609133
1056 1 0.876927 0.558049 0.0627759
1087 1 0.941936 0.626067 0.0633304
1182 1 0.938323 0.554363 0.127973
1213 1 0.876933 0.621234 0.123099
1533 1 0.875767 0.878181 0.376209
1053 1 0.87603 0.503417 0.00284468
1149 1 0.876159 0.876313 0.00210626
22 1 0.685798 0.0655557 -0.000597379
1364 1 0.498441 0.812949 0.309874
1080 1 0.627009 0.687896 0.0668721
1107 1 0.564198 0.749335 0.0661631
1112 1 0.624278 0.813019 0.0635718
1202 1 0.562684 0.691889 0.128063
1234 1 0.562453 0.820092 0.129344
1237 1 0.62981 0.753055 0.126427
465 1 0.503834 0.250517 0.377166
84 1 0.499112 0.312148 0.0631002
1537 1 0.995842 0.49859 0.493157
1084 1 0.751238 0.684951 0.057661
1111 1 0.686711 0.75061 0.0671969
1115 1 0.815331 0.749606 0.0657126
1116 1 0.753793 0.805134 0.0645913
1206 1 0.68759 0.688848 0.12561
1210 1 0.809828 0.685595 0.121423
1238 1 0.691506 0.815457 0.122813
1241 1 0.750402 0.746865 0.126272
1242 1 0.816504 0.809113 0.130981
212 1 0.504838 0.31512 0.183909
637 1 0.878332 0.376708 0.498344
90 1 0.814058 0.310888 0.00203436
1088 1 0.875141 0.685287 0.0659761
1119 1 0.942222 0.751437 0.0601177
1120 1 0.875243 0.815143 0.0679469
1214 1 0.936526 0.689404 0.124881
1245 1 0.876961 0.75139 0.124961
1246 1 0.937681 0.812435 0.125173
407 1 0.690365 0.00416773 0.43904
149 1 0.627509 0.999859 0.125985
1139 1 0.555724 0.876777 0.0587742
1144 1 0.626344 0.934276 0.0630045
1266 1 0.565858 0.934227 0.123201
1269 1 0.625854 0.873164 0.127484
420 1 0.997661 0.189904 0.438993
177 1 0.499892 0.124136 0.121665
50 1 0.558885 0.188503 -0.00183613
1532 1 0.749237 0.942934 0.438125
468 1 0.503894 0.316023 0.435197
1412 1 0.995999 0.562911 0.435046
27 1 0.805684 0.0020473 0.0579308
1143 1 0.6862 0.873127 0.0624444
1147 1 0.811813 0.87246 0.0636943
1148 1 0.748896 0.936742 0.0598553
1270 1 0.689207 0.937451 0.124815
1273 1 0.75261 0.876919 0.124818
1274 1 0.816338 0.93598 0.124011
62 1 0.929887 0.184326 0.00225384
436 1 0.498285 0.190925 0.440001
1089 1 1.0043 0.750887 0.00110582
225 1 0.998286 0.373808 0.124073
1151 1 0.939448 0.877288 0.0647018
1152 1 0.874055 0.938479 0.0574861
1277 1 0.877001 0.876588 0.130924
1278 1 0.939212 0.937828 0.123423
1531 1 0.811773 0.872399 0.442003
1530 1 0.815692 0.935998 0.370798
152 1 0.626527 0.0646744 0.185552
179 1 0.562604 0.127133 0.184483
274 1 0.55994 0.0607368 0.24737
280 1 0.624229 0.0641817 0.315419
307 1 0.558075 0.124656 0.31255
309 1 0.619585 0.125554 0.248222
277 1 0.623903 0.00155706 0.251003
147 1 0.566657 -0.00708448 0.18413
156 1 0.747393 0.0650819 0.187682
183 1 0.681318 0.125598 0.189112
187 1 0.812724 0.12481 0.187469
278 1 0.681987 0.0578485 0.25069
282 1 0.810175 0.0570482 0.244455
284 1 0.743005 0.064281 0.313457
311 1 0.683918 0.121742 0.31048
313 1 0.746081 0.125577 0.25012
315 1 0.813855 0.124746 0.312645
281 1 0.743419 -0.00163245 0.249864
283 1 0.811966 0.00988793 0.31442
151 1 0.686569 0.994615 0.187699
1529 1 0.752064 0.885585 0.378473
160 1 0.873094 0.0583237 0.183269
191 1 0.940276 0.120001 0.188919
286 1 0.935403 0.0609867 0.250638
288 1 0.871732 0.0636187 0.314754
317 1 0.878297 0.117543 0.247656
319 1 0.94055 0.124761 0.312327
285 1 0.875682 1.00398 0.250372
159 1 0.936573 0.998677 0.188263
1527 1 0.687548 0.8803 0.439749
184 1 0.623357 0.19094 0.190932
211 1 0.557477 0.254581 0.191985
216 1 0.62441 0.313523 0.192085
306 1 0.562656 0.192125 0.250491
312 1 0.617915 0.189762 0.310203
338 1 0.567204 0.313978 0.25679
339 1 0.562825 0.252283 0.31273
341 1 0.626044 0.251465 0.25013
344 1 0.634351 0.313854 0.310816
1526 1 0.683496 0.943375 0.37458
209 1 0.495059 0.245963 0.124139
188 1 0.749056 0.188543 0.18898
215 1 0.68662 0.250302 0.193292
219 1 0.813533 0.249404 0.187642
220 1 0.754578 0.314291 0.189746
310 1 0.687238 0.185585 0.255127
314 1 0.811393 0.186446 0.249862
316 1 0.750716 0.191159 0.313574
342 1 0.695194 0.3167 0.247933
343 1 0.687677 0.249811 0.314641
345 1 0.749955 0.24828 0.247321
346 1 0.816086 0.314364 0.24951
347 1 0.813691 0.252189 0.309905
348 1 0.750108 0.312268 0.310734
1566 1 0.937206 0.5645 0.497417
192 1 0.872576 0.184583 0.185538
223 1 0.930222 0.247659 0.184436
224 1 0.873957 0.315106 0.18587
318 1 0.93404 0.184579 0.246518
320 1 0.872808 0.18723 0.312327
349 1 0.871422 0.245333 0.246785
350 1 0.932937 0.307128 0.249369
351 1 0.932197 0.249593 0.310561
352 1 0.873114 0.312677 0.313165
500 1 0.49818 0.438073 0.433481
405 1 0.618308 1.00107 0.376128
403 1 0.56116 1.00065 0.436654
243 1 0.565488 0.370796 0.194017
248 1 0.623906 0.435321 0.194627
370 1 0.561381 0.442843 0.250657
371 1 0.562229 0.376432 0.314919
373 1 0.630369 0.376981 0.253648
376 1 0.627152 0.440744 0.312374
1299 1 0.56074 0.503584 0.309368
1301 1 0.627739 0.500863 0.249164
1525 1 0.626846 0.875706 0.374005
247 1 0.687605 0.378971 0.188348
251 1 0.816702 0.374796 0.191324
252 1 0.751907 0.438688 0.190764
374 1 0.687907 0.441711 0.247966
375 1 0.691284 0.377807 0.31381
377 1 0.756052 0.373455 0.250226
378 1 0.812192 0.435866 0.254067
379 1 0.811052 0.379165 0.311514
380 1 0.746239 0.439716 0.309231
1303 1 0.687375 0.502855 0.310039
255 1 0.938987 0.372358 0.186859
256 1 0.874585 0.434938 0.190311
381 1 0.876132 0.373183 0.24982
382 1 0.939547 0.435715 0.249803
383 1 0.940315 0.374917 0.309483
384 1 0.877112 0.432247 0.313713
1528 1 0.626326 0.941703 0.437469
1171 1 0.560198 0.503369 0.190573
1176 1 0.629624 0.569354 0.183701
1203 1 0.564167 0.625842 0.187202
1298 1 0.565303 0.566679 0.248547
1304 1 0.625224 0.560892 0.312594
1331 1 0.563836 0.624798 0.308937
1333 1 0.6295 0.621272 0.251887
1307 1 0.813554 0.494678 0.315562
1175 1 0.690326 0.500886 0.184464
1179 1 0.818176 0.502332 0.187223
1305 1 0.754885 0.503189 0.250601
1180 1 0.747396 0.559749 0.178471
1207 1 0.690935 0.625492 0.187462
1211 1 0.80858 0.627457 0.186713
1302 1 0.689262 0.556031 0.247502
1306 1 0.809424 0.563652 0.252903
1308 1 0.746103 0.561675 0.307773
1335 1 0.687871 0.6278 0.316868
1337 1 0.748447 0.624246 0.250279
1339 1 0.811867 0.624245 0.313207
1183 1 0.942947 0.497219 0.190815
1311 1 0.935515 0.499938 0.313956
1309 1 0.872336 0.499664 0.255478
1184 1 0.878004 0.558334 0.18806
1215 1 0.932909 0.623217 0.19029
1310 1 0.936606 0.559162 0.250335
1312 1 0.874764 0.565642 0.31578
1341 1 0.866591 0.620323 0.248564
1343 1 0.932963 0.626077 0.316859
1522 1 0.5613 0.933637 0.37552
1523 1 0.56598 0.878902 0.439201
530 1 0.558157 0.0635147 0.497017
1208 1 0.62729 0.683205 0.182406
1235 1 0.563978 0.753216 0.185366
1240 1 0.626695 0.814612 0.188029
1330 1 0.564838 0.687784 0.249435
1336 1 0.626283 0.689862 0.311601
1362 1 0.560157 0.809616 0.247972
1363 1 0.568586 0.752848 0.316687
1365 1 0.627499 0.748197 0.249301
1368 1 0.630481 0.815408 0.31868
25 1 0.743762 0.00530171 -0.00434486
1332 1 0.504929 0.694435 0.307064
1472 1 0.874234 0.693798 0.438284
1212 1 0.752717 0.688854 0.184415
1239 1 0.689571 0.757897 0.191435
1243 1 0.814315 0.750896 0.192066
1244 1 0.747304 0.814934 0.191123
1334 1 0.688464 0.68811 0.243511
1338 1 0.813803 0.686647 0.252243
1340 1 0.753326 0.692242 0.314621
1366 1 0.689818 0.818837 0.255166
1367 1 0.688748 0.752748 0.315006
1369 1 0.752134 0.750355 0.256118
1370 1 0.808156 0.814049 0.252722
1371 1 0.812666 0.753459 0.318654
1372 1 0.749297 0.813962 0.31357
1503 1 0.93913 0.750093 0.44067
1216 1 0.870858 0.688393 0.191009
1247 1 0.932469 0.751796 0.193049
1248 1 0.876798 0.813966 0.188085
1342 1 0.932146 0.689624 0.248402
1344 1 0.874926 0.688589 0.31051
1373 1 0.87476 0.753937 0.25347
1374 1 0.940045 0.818557 0.251944
1375 1 0.940855 0.750224 0.315017
1376 1 0.877197 0.814635 0.315132
1621 1 0.625842 0.754317 0.502859
257 1 1.00056 1.00335 0.251495
1501 1 0.87711 0.751429 0.374278
275 1 0.562135 0.00157285 0.316362
1267 1 0.558077 0.875087 0.192856
1272 1 0.627612 0.935189 0.191423
1394 1 0.562675 0.937239 0.245528
1395 1 0.562815 0.877199 0.312535
1397 1 0.625178 0.871771 0.252176
1400 1 0.621324 0.93477 0.311059
1504 1 0.877075 0.807644 0.437581
279 1 0.685421 0.99602 0.313104
155 1 0.810611 0.999327 0.186718
1271 1 0.688447 0.872861 0.189733
1275 1 0.813736 0.874178 0.186308
1276 1 0.745509 0.936299 0.194416
1398 1 0.68642 0.936438 0.254259
1399 1 0.689264 0.877546 0.314222
1401 1 0.749206 0.879059 0.251246
1402 1 0.807056 0.936569 0.253101
1403 1 0.811045 0.873497 0.315128
1404 1 0.751885 0.943789 0.316185
287 1 0.938713 0.000483162 0.308656
1279 1 0.938701 0.873123 0.190021
1280 1 0.872766 0.941256 0.189545
1405 1 0.874899 0.874592 0.245922
1406 1 0.929566 0.937995 0.250949
1407 1 0.933783 0.879961 0.314496
1408 1 0.877066 0.941669 0.314366
1380 1 0.998659 0.941054 0.312307
402 1 0.558265 0.060602 0.375813
408 1 0.620313 0.0682182 0.437008
435 1 0.555667 0.127692 0.435529
437 1 0.622625 0.130653 0.377408
1316 1 0.997946 0.689577 0.318756
1042 1 0.56249 0.564875 -0.00282372
1041 1 0.501524 0.497927 -0.00452964
452 1 1.0021 0.307144 0.438702
1470 1 0.937341 0.689029 0.382574
409 1 0.753021 -0.00092366 0.37568
411 1 0.814901 1.00091 0.435851
406 1 0.683113 0.0623334 0.375204
410 1 0.809491 0.0651039 0.377121
412 1 0.749293 0.061342 0.438738
439 1 0.679536 0.126744 0.442413
441 1 0.746317 0.124035 0.379538
443 1 0.806858 0.127981 0.44535
1172 1 0.50147 0.563695 0.184205
1494 1 0.687964 0.816248 0.380681
1462 1 0.691297 0.690319 0.379678
415 1 0.93017 0.00799177 0.437365
1466 1 0.810122 0.684164 0.380375
1495 1 0.690644 0.756526 0.445918
413 1 0.870813 0.00453861 0.373829
414 1 0.93683 0.0685827 0.375239
416 1 0.873511 0.066177 0.437316
445 1 0.872332 0.127983 0.373344
447 1 0.933267 0.131241 0.439119
601 1 0.751013 0.250811 0.50185
1653 1 0.623233 0.879187 0.504394
472 1 0.630641 0.318209 0.434524
469 1 0.630933 0.246899 0.37193
467 1 0.568244 0.256945 0.436316
434 1 0.564443 0.189482 0.376455
440 1 0.61976 0.189431 0.441288
466 1 0.563533 0.311114 0.371584
1044 1 0.499505 0.561926 0.0637179
442 1 0.805569 0.182486 0.381608
438 1 0.69162 0.183468 0.374521
471 1 0.685088 0.247099 0.440329
473 1 0.751998 0.247862 0.375144
474 1 0.812794 0.316446 0.373299
444 1 0.745523 0.190573 0.440185
475 1 0.80826 0.253556 0.43643
476 1 0.750052 0.315558 0.438847
470 1 0.691867 0.312157 0.374491
1468 1 0.748035 0.68663 0.445906
1499 1 0.811555 0.752183 0.438162
1565 1 0.876794 0.499229 0.499817
1502 1 0.936755 0.812917 0.377779
1498 1 0.812308 0.812212 0.37293
1108 1 0.503369 0.80935 0.0639841
1458 1 0.565434 0.687178 0.370264
566 1 0.681689 0.186346 0.503035
479 1 0.937811 0.248827 0.439996
480 1 0.874524 0.311738 0.441792
478 1 0.935466 0.307722 0.38129
477 1 0.873857 0.248389 0.375406
446 1 0.933488 0.187279 0.377199
448 1 0.868713 0.189084 0.438119
164 1 0.995929 0.181515 0.188601
1490 1 0.563986 0.817228 0.377347
1073 1 0.498721 0.626729 -0.00120564
499 1 0.56465 0.378642 0.435035
504 1 0.628946 0.443282 0.436079
501 1 0.62468 0.373385 0.367899
498 1 0.559649 0.443346 0.374542
1427 1 0.561905 0.505425 0.434401
1429 1 0.624059 0.50078 0.372679
1464 1 0.625624 0.689332 0.436164
508 1 0.747801 0.441489 0.438185
507 1 0.812376 0.377122 0.440751
506 1 0.810007 0.434113 0.378169
505 1 0.751756 0.376641 0.37226
503 1 0.689509 0.377169 0.436528
502 1 0.693328 0.440835 0.374794
1433 1 0.751441 0.501759 0.368061
1435 1 0.810674 0.50166 0.436403
1496 1 0.626953 0.814239 0.440216
1493 1 0.628667 0.75222 0.382282
58 1 0.815506 0.185342 0.00289469
512 1 0.872972 0.441895 0.432139
511 1 0.937802 0.370963 0.43853
510 1 0.937571 0.435915 0.378947
509 1 0.87491 0.370873 0.375597
1439 1 0.935458 0.500094 0.434809
196 1 0.994652 0.306055 0.186161
1426 1 0.567772 0.568696 0.374452
1432 1 0.625505 0.563538 0.438437
1459 1 0.564636 0.631296 0.433894
1461 1 0.626579 0.624993 0.37657
1491 1 0.559137 0.746751 0.438681
1500 1 0.749717 0.818052 0.439105
1431 1 0.691916 0.501164 0.442349
1463 1 0.68841 0.62397 0.439778
1436 1 0.751012 0.564175 0.435274
1434 1 0.810591 0.562142 0.374977
1430 1 0.688389 0.55918 0.375962
1467 1 0.806719 0.626823 0.441497
1465 1 0.749899 0.627537 0.374793
1497 1 0.746086 0.755554 0.373734
1233 1 0.503187 0.755399 0.126017
1437 1 0.875349 0.50203 0.374693
1438 1 0.934309 0.560364 0.37365
1440 1 0.871699 0.560766 0.436652
1469 1 0.870525 0.620823 0.378926
1471 1 0.938264 0.627157 0.434858
1524 1 0.496185 0.940243 0.439518
1633 1 1.00172 0.878616 0.504525
61 1 0.871642 0.119607 0.0011204
308 1 0.499633 0.188804 0.313705
145 1 0.504333 0.994294 0.123138
117 1 0.62515 0.376624 -0.000608342
1145 1 0.74768 0.871998 0.00237672
638 1 0.939468 0.435891 0.492934
1593 1 0.747953 0.625482 0.502839
602 1 0.812945 0.310314 0.496508
1521 1 0.49993 0.87722 0.369943
1618 1 0.562803 0.817464 0.500522
630 1 0.687523 0.438248 0.500669
1113 1 0.753883 0.744308 -0.00149954
1185 1 0.994837 0.621112 0.129289
52 1 0.504674 0.185094 0.0658266
1081 1 0.745937 0.623211 -0.00339365
1045 1 0.620626 0.501386 0.00174516
1489 1 0.501104 0.750688 0.369697
1076 1 0.504736 0.684138 0.0665576
1444 1 0.996142 0.690878 0.436632
193 1 1.00331 0.238795 0.130464
305 1 0.499736 0.12483 0.249097
1284 1 0.998863 0.561875 0.317813
68 1 0.997609 0.315429 0.0636683
1201 1 0.502786 0.622181 0.123368
1217 1 0.999789 0.756978 0.133099
356 1 0.999547 0.439591 0.311844
634 1 0.81745 0.438837 0.498112
565 1 0.616012 0.126036 0.500062
1313 1 0.994137 0.625911 0.250877
1562 1 0.807679 0.559778 0.501099
1300 1 0.498344 0.566131 0.308456
1220 1 1.00136 0.82078 0.189322
1204 1 0.501695 0.686336 0.181146
1409 1 0.999563 0.501652 0.380515
321 1 0.993202 0.245019 0.249001
324 1 0.992403 0.309718 0.313357
273 1 0.500007 0.00198396 0.243554
369 1 0.498232 0.377285 0.25234
417 1 1.00032 0.129007 0.379843
1188 1 0.992419 0.691663 0.186962
241 1 0.502051 0.37949 0.127642
542 1 0.937185 0.0660915 0.500223
65 1 0.995246 0.243345 0.00801298
1597 1 0.876699 0.629893 0.495671
1558 1 0.690422 0.559202 0.499397
570 1 0.809431 0.191819 0.504315
114 1 0.566464 0.439002 0.000394092
541 1 0.871073 0.00384553 0.500574
86 1 0.68729 0.312034 0.00363204
122 1 0.812474 0.438638 -0.000130935
629 1 0.629611 0.378429 0.497702
626 1 0.559494 0.441416 0.494976
1086 1 0.935866 0.683333 0.00237814
1054 1 0.941694 0.558137 0.00338406
29 1 0.870304 0.00348507 0.000223778
93 1 0.875994 0.249718 0.0049311
534 1 0.681723 0.0633942 0.498693
57 1 0.751511 0.127965 0.00476684
593 1 0.505982 0.253682 0.497061
1077 1 0.625851 0.621929 -0.00332039
1662 1 0.935855 0.937067 0.499988
1085 1 0.873431 0.621288 0.00138137
54 1 0.68633 0.183964 0.00610228
1046 1 0.685568 0.559875 0.00519794
85 1 0.624662 0.248726 0.00791486
82 1 0.565603 0.31359 0.00707315
1050 1 0.80921 0.560008 0.00321695
94 1 0.940609 0.308038 0.00391409
536 1 0.625322 0.062472 0.558766
563 1 0.556949 0.125799 0.563134
658 1 0.556974 0.0651907 0.625805
693 1 0.623279 0.121473 0.626061
1716 1 0.502676 0.68355 0.687987
964 1 0.997863 0.313622 0.937494
612 1 0.99744 0.437776 0.563421
605 1 0.871862 0.248038 0.499288
540 1 0.749348 0.0676328 0.56802
567 1 0.682601 0.124021 0.560771
571 1 0.811639 0.130597 0.558475
662 1 0.686986 0.0596153 0.625566
666 1 0.812254 0.0704029 0.624109
697 1 0.745141 0.128796 0.626354
539 1 0.813371 0.00516001 0.565823
535 1 0.684509 0.00879617 0.558864
665 1 0.747587 -0.000768396 0.622979
573 1 0.869875 0.130677 0.5004
544 1 0.870994 0.0698446 0.559107
575 1 0.934832 0.12301 0.563343
670 1 0.941061 0.058981 0.620028
701 1 0.877958 0.129353 0.622136
865 1 0.998904 0.37032 0.746496
2017 1 1.00193 0.872715 0.870714
568 1 0.62191 0.186143 0.559143
595 1 0.559993 0.246587 0.558802
600 1 0.624292 0.315724 0.560503
690 1 0.561991 0.190172 0.618902
722 1 0.5643 0.312528 0.619067
725 1 0.624324 0.252563 0.621387
1630 1 0.934644 0.814651 0.501791
708 1 0.995448 0.310251 0.685878
916 1 0.501195 0.0620524 0.937784
1556 1 0.50485 0.560217 0.554625
572 1 0.745244 0.185298 0.560453
599 1 0.687589 0.251222 0.560771
603 1 0.808188 0.249005 0.562985
604 1 0.75041 0.320739 0.56141
694 1 0.68279 0.183606 0.621421
698 1 0.810656 0.189816 0.624987
726 1 0.692483 0.310118 0.623854
729 1 0.747327 0.247687 0.621291
730 1 0.811239 0.31678 0.620938
594 1 0.564511 0.319748 0.494181
1009 1 0.500801 0.372506 0.874704
576 1 0.87617 0.189442 0.564486
607 1 0.938401 0.255454 0.562257
608 1 0.87255 0.311964 0.563538
702 1 0.934881 0.194001 0.622701
733 1 0.872414 0.250092 0.626786
734 1 0.940491 0.311363 0.623376
657 1 0.501465 -8.35426e-06 0.62827
1626 1 0.810919 0.810424 0.504714
1748 1 0.498836 0.808089 0.684966
627 1 0.560569 0.377551 0.557422
632 1 0.621339 0.43656 0.559066
754 1 0.566227 0.435943 0.623454
757 1 0.62872 0.375508 0.624749
1685 1 0.627772 0.499044 0.618845
948 1 0.502606 0.188208 0.941444
631 1 0.68434 0.379782 0.564112
635 1 0.815663 0.377107 0.556827
636 1 0.753588 0.439804 0.562911
758 1 0.689017 0.444568 0.617908
761 1 0.747615 0.380071 0.623627
762 1 0.816019 0.436476 0.619469
1563 1 0.813404 0.50218 0.558655
1940 1 0.500806 0.558225 0.938726
639 1 0.932937 0.374114 0.565059
640 1 0.878583 0.433338 0.56324
765 1 0.875328 0.375627 0.626322
766 1 0.938332 0.435913 0.625389
1969 1 0.499989 0.626589 0.873156
1812 1 0.501469 0.563651 0.811574
996 1 0.997996 0.43519 0.93533
1555 1 0.564437 0.50315 0.56559
1560 1 0.627349 0.564549 0.564105
1587 1 0.563104 0.628817 0.559946
1682 1 0.564776 0.566266 0.62483
1717 1 0.626337 0.624624 0.624624
2048 1 0.871762 0.938841 0.939904
1078 1 0.683879 0.690427 1.00457
2047 1 0.944842 0.880436 0.933027
1697 1 1.00031 0.623375 0.626068
1559 1 0.689595 0.505491 0.56136
1689 1 0.748269 0.500969 0.622262
1564 1 0.746978 0.563856 0.565633
1591 1 0.686047 0.624792 0.563861
1595 1 0.814867 0.62293 0.557879
1686 1 0.688904 0.566735 0.628971
1690 1 0.817024 0.559547 0.624429
1721 1 0.756174 0.624915 0.628337
1109 1 0.622291 0.751406 1.00274
897 1 0.999246 -0.000199348 0.87137
1636 1 0.99962 0.945255 0.564407
1567 1 0.938298 0.497593 0.555053
1693 1 0.87871 0.499384 0.620143
1568 1 0.878825 0.565272 0.557921
1599 1 0.93702 0.624369 0.558547
1694 1 0.938108 0.560984 0.623898
1725 1 0.878455 0.620249 0.622291
538 1 0.806525 0.0659754 0.498967
801 1 1.00328 0.118854 0.747926
2046 1 0.938885 0.936418 0.872195
1592 1 0.627181 0.687899 0.564434
1619 1 0.56711 0.748129 0.563199
1624 1 0.620577 0.812764 0.564626
1714 1 0.558512 0.686243 0.622796
1746 1 0.558114 0.817806 0.624538
1749 1 0.622949 0.752937 0.623957
1985 1 0.995786 0.750227 0.871195
932 1 0.999549 0.187425 0.938107
1596 1 0.752931 0.68556 0.565132
1623 1 0.692979 0.744269 0.568313
1627 1 0.811505 0.752953 0.569103
1628 1 0.750011 0.811748 0.562083
1718 1 0.685374 0.683351 0.631438
1722 1 0.811315 0.684821 0.62996
1750 1 0.684378 0.812834 0.624209
1753 1 0.748827 0.748821 0.631811
1754 1 0.811281 0.813537 0.620341
769 1 0.999109 1.00139 0.749438
2045 1 0.875785 0.876496 0.875492
1600 1 0.876891 0.685602 0.567467
1631 1 0.940018 0.754874 0.564231
1632 1 0.879106 0.814098 0.56508
1726 1 0.940999 0.687491 0.623961
1757 1 0.883496 0.749336 0.626381
1758 1 0.939541 0.812969 0.629241
692 1 0.500266 0.186378 0.688749
1629 1 0.874021 0.750915 0.506283
661 1 0.622158 0.00449964 0.619363
531 1 0.560233 1.00567 0.56513
1651 1 0.559258 0.879336 0.562622
1656 1 0.62419 0.942147 0.560284
1778 1 0.565557 0.938511 0.624277
1781 1 0.623855 0.874624 0.626736
1665 1 1.00748 0.498998 0.620336
1655 1 0.684187 0.875369 0.564893
1659 1 0.814426 0.871892 0.56137
1660 1 0.742218 0.940053 0.565675
1782 1 0.685652 0.941839 0.623132
1785 1 0.747062 0.869051 0.623618
1786 1 0.80971 0.930268 0.624084
1876 1 0.496342 0.816999 0.810411
543 1 0.932523 1.00281 0.557369
1908 1 0.499746 0.941596 0.811795
705 1 1.00212 0.251947 0.622211
669 1 0.871775 0.00361138 0.623722
1663 1 0.939148 0.874875 0.561579
1664 1 0.875997 0.93799 0.568577
1789 1 0.872542 0.874506 0.626179
1790 1 0.939192 0.932791 0.630551
1681 1 0.503629 0.497027 0.62483
1780 1 0.498765 0.934745 0.679071
804 1 0.997422 0.184158 0.813409
664 1 0.627271 0.0601409 0.685426
691 1 0.565941 0.128575 0.690841
786 1 0.559622 0.0658013 0.748491
792 1 0.627584 0.064903 0.813809
819 1 0.560589 0.125 0.808166
821 1 0.620769 0.130373 0.75405
1589 1 0.623839 0.626579 0.501788
1121 1 0.998939 0.875463 0.999492
533 1 0.618971 0.00292298 0.497034
668 1 0.742139 0.0584481 0.687584
695 1 0.682224 0.12257 0.694772
699 1 0.81052 0.121399 0.693913
790 1 0.689454 0.0626318 0.752552
794 1 0.811836 0.0634171 0.755254
796 1 0.747585 0.0624971 0.819465
823 1 0.688556 0.127147 0.816757
825 1 0.749392 0.122334 0.752643
827 1 0.811429 0.127344 0.813882
667 1 0.812364 1.00063 0.685275
795 1 0.816004 0.999431 0.818825
663 1 0.682667 0.99898 0.689761
2044 1 0.746812 0.941721 0.937823
672 1 0.871814 0.0624039 0.689467
703 1 0.936911 0.12361 0.68494
798 1 0.934803 0.0630345 0.752354
800 1 0.874098 0.0629309 0.807341
829 1 0.874331 0.127244 0.749442
831 1 0.934905 0.123984 0.809759
797 1 0.87674 -0.00247329 0.744326
799 1 0.936047 1.00162 0.813415
671 1 0.939969 0.999533 0.681244
2043 1 0.806757 0.876815 0.940463
562 1 0.56055 0.190649 0.499631
1905 1 0.503122 0.872178 0.74862
696 1 0.61906 0.189812 0.686182
723 1 0.563554 0.251978 0.683638
728 1 0.623487 0.311416 0.687015
818 1 0.565543 0.190244 0.753721
824 1 0.624495 0.186816 0.81336
850 1 0.563858 0.31271 0.748633
851 1 0.562415 0.251142 0.814918
853 1 0.624037 0.250433 0.747587
856 1 0.627397 0.314262 0.8071
1106 1 0.557646 0.816034 1.00007
537 1 0.747764 0.00397396 0.507671
881 1 0.501367 0.378589 0.752078
700 1 0.748854 0.181919 0.691497
727 1 0.688633 0.249179 0.684047
731 1 0.812315 0.245172 0.68564
732 1 0.750013 0.311632 0.688534
822 1 0.682909 0.18638 0.750714
826 1 0.812937 0.187746 0.74981
828 1 0.747349 0.189095 0.814274
854 1 0.684612 0.310711 0.747626
855 1 0.682596 0.254286 0.812538
857 1 0.743739 0.248992 0.752974
858 1 0.812493 0.312174 0.754229
859 1 0.813311 0.247397 0.815295
860 1 0.751907 0.30934 0.818567
2042 1 0.809775 0.931431 0.878068
704 1 0.875345 0.183001 0.685343
735 1 0.935888 0.249331 0.689062
736 1 0.874943 0.312578 0.686174
830 1 0.938798 0.184145 0.748192
832 1 0.877421 0.189836 0.810633
861 1 0.873203 0.251209 0.751017
862 1 0.933223 0.311169 0.753581
863 1 0.937261 0.246006 0.818539
864 1 0.875986 0.311697 0.817803
2041 1 0.753224 0.87313 0.880036
755 1 0.568413 0.377973 0.682617
760 1 0.630767 0.439139 0.679265
882 1 0.570177 0.437219 0.744051
883 1 0.570915 0.375776 0.808601
885 1 0.63012 0.375877 0.742863
888 1 0.633499 0.435155 0.809437
1683 1 0.558615 0.499968 0.687144
1813 1 0.621802 0.498097 0.74813
2039 1 0.693006 0.877149 0.937045
759 1 0.693425 0.370259 0.690498
763 1 0.812873 0.371135 0.683095
764 1 0.756854 0.437174 0.688998
886 1 0.692636 0.438108 0.749422
887 1 0.689366 0.368 0.809596
889 1 0.757108 0.376778 0.75229
890 1 0.816451 0.438146 0.758123
891 1 0.816428 0.374932 0.814084
892 1 0.749291 0.438011 0.815293
1691 1 0.817056 0.500862 0.685414
1687 1 0.693493 0.496023 0.684844
1815 1 0.688592 0.501278 0.806312
1601 1 0.998189 0.754268 0.502528
767 1 0.933847 0.369134 0.68879
768 1 0.871057 0.436601 0.688753
893 1 0.870529 0.378615 0.753002
894 1 0.943409 0.436824 0.743745
895 1 0.937449 0.373579 0.80945
896 1 0.874899 0.441281 0.81802
1823 1 0.942449 0.494705 0.81412
1695 1 0.938484 0.498794 0.68575
2038 1 0.691888 0.937276 0.873613
1811 1 0.559121 0.50428 0.817591
1688 1 0.62612 0.560816 0.694434
1715 1 0.564187 0.622806 0.687051
1810 1 0.562691 0.560746 0.7513
1816 1 0.627087 0.560966 0.810439
1843 1 0.559977 0.621766 0.814617
1845 1 0.623712 0.623612 0.751228
1729 1 1.00013 0.747548 0.630251
1819 1 0.81315 0.49982 0.818719
1817 1 0.758853 0.494661 0.754225
1692 1 0.752753 0.559629 0.688283
1719 1 0.687413 0.622474 0.688208
1723 1 0.813085 0.621628 0.688465
1814 1 0.692384 0.557774 0.747116
1818 1 0.812081 0.559051 0.752692
1820 1 0.753566 0.56161 0.815999
1847 1 0.685996 0.624265 0.814222
1849 1 0.751825 0.622149 0.751699
1851 1 0.806525 0.629769 0.814625
980 1 0.505173 0.309955 0.938578
1821 1 0.877986 0.496722 0.755167
1696 1 0.878416 0.561154 0.691421
1727 1 0.943979 0.623766 0.690566
1822 1 0.938179 0.557336 0.749561
1824 1 0.875555 0.557864 0.813673
1853 1 0.878844 0.620584 0.753113
1855 1 0.940516 0.621467 0.812577
1970 1 0.562234 0.683599 0.874746
2003 1 0.565206 0.751521 0.931953
919 1 0.684777 -0.000733749 0.937477
915 1 0.56012 0.00314353 0.938457
1720 1 0.620777 0.687804 0.685316
1747 1 0.560711 0.750954 0.685767
1752 1 0.623764 0.80684 0.686901
1842 1 0.564344 0.684536 0.75137
1848 1 0.620227 0.695391 0.818099
1874 1 0.562116 0.809047 0.747475
1875 1 0.557328 0.751046 0.808235
1877 1 0.626192 0.749637 0.750225
1880 1 0.621461 0.814826 0.807445
1657 1 0.748273 0.877904 0.506938
1724 1 0.74805 0.683513 0.69098
1751 1 0.687248 0.747585 0.685905
1755 1 0.809384 0.748575 0.687537
1756 1 0.750784 0.810564 0.684731
1846 1 0.680453 0.684082 0.747696
1850 1 0.810418 0.68499 0.752482
1852 1 0.74561 0.689966 0.80796
1878 1 0.689452 0.808681 0.751353
1879 1 0.686631 0.754991 0.816165
1881 1 0.748396 0.747956 0.751364
1882 1 0.811778 0.807653 0.749222
1883 1 0.807825 0.74864 0.815081
1884 1 0.750378 0.810323 0.814202
1873 1 0.497291 0.750275 0.748411
1728 1 0.876274 0.684836 0.688409
1759 1 0.940906 0.749516 0.687253
1760 1 0.872447 0.815215 0.685213
1854 1 0.939587 0.689197 0.7534
1856 1 0.873771 0.683549 0.818779
1885 1 0.875026 0.748425 0.744299
1886 1 0.93935 0.808856 0.749772
1887 1 0.940536 0.750408 0.805916
1888 1 0.875129 0.805023 0.810404
2035 1 0.556951 0.875988 0.935812
2005 1 0.625805 0.757625 0.872902
2034 1 0.559633 0.939547 0.881616
849 1 0.505127 0.250352 0.750096
787 1 0.558821 0.00642062 0.813555
789 1 0.622831 1.00284 0.751289
659 1 0.562421 0.00203701 0.68384
1779 1 0.561884 0.873383 0.688255
1784 1 0.622887 0.941366 0.686612
1906 1 0.55966 0.940337 0.744563
1907 1 0.565816 0.875564 0.817619
1909 1 0.622652 0.876514 0.748345
1912 1 0.617159 0.944797 0.813923
597 1 0.62102 0.252387 0.500119
791 1 0.688918 0.000783335 0.812899
793 1 0.750694 0.997785 0.747805
1783 1 0.686848 0.871823 0.684479
1787 1 0.811307 0.874835 0.685088
1788 1 0.748542 0.938221 0.682997
1910 1 0.691385 0.936201 0.755806
1911 1 0.688208 0.866743 0.81146
1913 1 0.751226 0.877366 0.749807
1914 1 0.811462 0.937178 0.748253
1915 1 0.812733 0.869273 0.814524
1916 1 0.757647 0.936847 0.811652
676 1 0.999854 0.182777 0.685994
1791 1 0.936226 0.870022 0.688916
1792 1 0.874802 0.940387 0.685478
1917 1 0.870605 0.870524 0.748474
1918 1 0.933182 0.937886 0.750115
1919 1 0.936879 0.874111 0.810951
1920 1 0.871283 0.933245 0.813103
2037 1 0.628068 0.879637 0.873067
569 1 0.743699 0.122476 0.501843
2040 1 0.621598 0.93978 0.937904
914 1 0.560943 0.0674181 0.878163
920 1 0.625091 0.0611309 0.939245
947 1 0.568297 0.128436 0.942293
949 1 0.622393 0.128463 0.877283
2002 1 0.563369 0.814542 0.875334
1604 1 0.99996 0.813945 0.563422
923 1 0.805848 0.0017076 0.938255
921 1 0.744716 1.00342 0.877845
918 1 0.684697 0.0644241 0.878342
922 1 0.814268 0.0646317 0.874942
924 1 0.749963 0.0645557 0.938535
951 1 0.688286 0.12416 0.944795
953 1 0.74636 0.128287 0.875295
955 1 0.810841 0.12852 0.94104
2008 1 0.629585 0.814612 0.940006
925 1 0.870467 1.00266 0.877614
927 1 0.937247 0.00326639 0.93371
926 1 0.939468 0.0634824 0.87061
928 1 0.87102 0.0642136 0.936497
957 1 0.875367 0.120898 0.87383
959 1 0.934677 0.120527 0.936538
1978 1 0.809506 0.682327 0.879261
724 1 0.503809 0.318288 0.689616
2007 1 0.683377 0.744481 0.939626
2012 1 0.747433 0.80837 0.938459
917 1 0.624562 0.00574954 0.874169
2011 1 0.81626 0.745914 0.935858
1974 1 0.685672 0.686945 0.87286
1732 1 1.003 0.811539 0.690171
981 1 0.629624 0.252615 0.877941
984 1 0.626765 0.316597 0.939235
946 1 0.566239 0.186211 0.876354
952 1 0.627737 0.186192 0.941404
978 1 0.570169 0.317655 0.873013
979 1 0.567208 0.252272 0.940322
2009 1 0.745983 0.746627 0.877851
753 1 0.501764 0.375525 0.624536
986 1 0.813331 0.3068 0.875724
982 1 0.687458 0.312745 0.875132
987 1 0.810207 0.24842 0.939045
988 1 0.749635 0.31242 0.937339
983 1 0.68818 0.24945 0.950164
954 1 0.809998 0.186491 0.876473
985 1 0.748975 0.252041 0.875545
956 1 0.748851 0.183594 0.941937
950 1 0.68887 0.187529 0.878488
1949 1 0.87815 0.497724 0.880476
960 1 0.876985 0.187249 0.940719
991 1 0.936117 0.250267 0.941917
990 1 0.939163 0.316104 0.871925
992 1 0.877545 0.312238 0.940899
989 1 0.87271 0.247128 0.882044
958 1 0.936924 0.183863 0.878661
2006 1 0.688937 0.808202 0.877927
1980 1 0.744133 0.686892 0.938205
1141 1 0.626704 0.876756 1.0024
1016 1 0.62572 0.437293 0.937641
1013 1 0.631051 0.371723 0.87399
1011 1 0.562207 0.376738 0.938612
1010 1 0.56888 0.439529 0.872285
1941 1 0.625402 0.500207 0.875065
2010 1 0.814097 0.807761 0.876261
1982 1 0.940689 0.684629 0.88008
1020 1 0.748326 0.437711 0.937416
1019 1 0.810363 0.371185 0.935978
1018 1 0.814029 0.439091 0.875558
1017 1 0.750213 0.374236 0.873206
1014 1 0.689296 0.44098 0.875965
1015 1 0.688852 0.376123 0.937827
1943 1 0.686868 0.501243 0.937859
2015 1 0.937968 0.749522 0.939175
1984 1 0.877805 0.680759 0.938687
1761 1 1.00206 0.875274 0.629442
1024 1 0.87215 0.437322 0.938089
1023 1 0.934393 0.370703 0.941714
1022 1 0.942514 0.435539 0.874602
1021 1 0.8746 0.375117 0.877044
1951 1 0.941059 0.498927 0.936765
1950 1 0.940629 0.560551 0.872508
1796 1 1.00021 0.560498 0.814115
1976 1 0.624748 0.679174 0.934401
2013 1 0.878327 0.746568 0.876442
1625 1 0.750481 0.755356 0.501492
1939 1 0.563477 0.498332 0.939609
1938 1 0.560713 0.563156 0.87779
1944 1 0.627556 0.559827 0.942673
1971 1 0.563215 0.626392 0.937767
1973 1 0.62268 0.625045 0.87347
2016 1 0.872151 0.813186 0.938218
2014 1 0.938355 0.812786 0.876374
1947 1 0.81307 0.499735 0.939189
1945 1 0.747324 0.497183 0.873345
1979 1 0.810351 0.622731 0.940126
1942 1 0.68228 0.564514 0.872138
1946 1 0.815006 0.563061 0.877181
1948 1 0.747778 0.559284 0.941944
1975 1 0.685148 0.620222 0.937031
1977 1 0.751177 0.624491 0.874837
1983 1 0.938361 0.617133 0.936982
1981 1 0.879137 0.620079 0.874074
1952 1 0.874636 0.562907 0.93847
737 1 0.997941 0.373913 0.621445
1117 1 0.879044 0.744647 1.00546
1012 1 0.503286 0.436653 0.935444
884 1 0.505884 0.434705 0.816171
1118 1 0.93839 0.809876 0.997263
1588 1 0.499899 0.688481 0.563406
977 1 0.505543 0.25315 0.876481
833 1 0.998674 0.251342 0.750867
756 1 0.502317 0.436516 0.688344
1937 1 0.502036 0.499138 0.877494
1661 1 0.877441 0.878671 0.500302
1586 1 0.566439 0.688273 0.496768
1809 1 0.505914 0.493486 0.752876
2020 1 1.0002 0.940374 0.935096
836 1 1.00101 0.311922 0.811263
1554 1 0.564346 0.562004 0.498796
852 1 0.506821 0.310508 0.808788
1074 1 0.563563 0.687265 1.00346
1540 1 0.998003 0.558044 0.558027
673 1 1.00738 0.124951 0.623239
740 1 1.00124 0.43656 0.682158
548 1 0.998463 0.185125 0.564347
1654 1 0.689232 0.939878 0.499937
1658 1 0.808957 0.938682 0.507323
1585 1 0.504732 0.628264 0.496457
1082 1 0.814379 0.684687 1.00247
21 1 0.61988 0.000150999 1.00165
1110 1 0.690073 0.809348 1.00194
118 1 0.688528 0.438766 0.996812
1146 1 0.8104 0.939979 1.00041
1114 1 0.816671 0.81506 1.00052
113 1 0.4993 0.371679 0.998608
125 1 0.87007 0.371168 1.0022
1650 1 0.558143 0.937179 0.504975
1150 1 0.94005 0.938008 0.996478
1617 1 0.501506 0.753439 0.504246
606 1 0.935426 0.313179 0.50003
633 1 0.744173 0.381078 0.49725
1590 1 0.683331 0.692264 0.502539
1557 1 0.623861 0.500824 0.50135
26 1 0.808787 0.068226 1.00226
1622 1 0.685959 0.813626 0.507248
1594 1 0.815898 0.692924 0.504871
33 1 0.998463 0.122122 0.998915
574 1 0.935994 0.188305 0.502992
1598 1 0.93674 0.691651 0.503403
1138 1 0.561145 0.937173 0.995046
1142 1 0.684473 0.937888 1.00198
| [
"[email protected]"
] | |
c5764734108e5118eb033f9417b70073be8ac9a0 | 28541d61368a14a0d5003db4cc07fed21b40c41f | /Chapter-4/maze3.py | 2a2bcf9a00c029002b258874bd88cd10f9fc123a | [] | no_license | eizin6389/python_algorithm | 390861f9342ce907f2cda0b45b84d364bcba7541 | abf3588ed97a343b6559eb5d69156708d42bc243 | refs/heads/master | 2022-12-06T20:48:49.470312 | 2020-08-14T13:29:26 | 2020-08-14T13:29:26 | 282,905,077 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 796 | py | maze = [
    # Grid legend: 9 = wall, 0 = open cell, 1 = goal, 2 = already visited.
    # The walk starts at cell (1, 1); the goal (value 1) sits at row 6, column 10.
    [9,9,9,9,9,9,9,9,9,9,9,9],
    [9,0,0,0,9,0,0,0,0,0,0,9],
    [9,0,9,0,0,0,9,9,0,9,9,9],
    [9,0,9,9,0,9,0,0,0,9,0,9],
    [9,0,0,0,9,0,0,9,9,0,9,9],
    [9,9,9,0,0,9,0,9,0,0,0,9],
    [9,0,0,0,9,0,9,0,0,9,1,9],
    [9,0,9,0,0,0,0,9,0,0,9,9],
    [9,0,0,9,0,9,0,0,9,0,0,9],
    [9,0,9,0,9,0,9,0,0,9,0,9],
    [9,0,0,0,0,0,0,9,0,0,0,9],
    [9,9,9,9,9,9,9,9,9,9,9,9]
]
# Movement deltas in (row, col) order: down, right, up, left.
# NOTE: the name shadows the built-in dir(); kept as-is here.
dir = [[1,0],[0,1],[-1,0],[0,-1]]
# (x, y) = current cell, depth = net distance walked, d = last direction index.
x, y, depth, d = 1, 1, 0, 0
# Wall-follower: starting from the previous heading d, try the direction one
# step back in the `dir` cycle first ((d + i - 1) with i = 0), then rotate
# through the remaining directions until a non-wall neighbour is found.
while maze[x][y] != 1:
    maze[x][y] = 2
    for i in range(len(dir)):
        j = (d + i - 1) % len(dir)
        if maze[x + dir[j][0]][y + dir[j][1]] < 2:
            # Fresh cell (or the goal): step forward, net distance grows.
            x += dir[j][0]
            y += dir[j][1]
            d = j
            depth += 1
            break
        elif maze[x + dir[j][0]][y + dir[j][1]] == 2:
            # Only an already-visited cell is reachable: this is a backtrack,
            # so the net distance shrinks by one.
            x += dir[j][0]
            y += dir[j][1]
            d = j
            depth -= 1
            break
# Net number of steps from the entrance to the goal.
print(depth)
| [
"[email protected]"
] | |
3eaa1551407f554655a52f1b22c4d721669fa579 | 3e6e18edfe81bb19e298ae4e1831cb76c2c6069d | /src/lpcshop/models/bottles.py | a4dba719454dd661eebe4d48daada55e5b64e9f8 | [] | no_license | libertalia/lpc | 2e72de7eee36cd92d62e4d250186bda2353c179a | 972343abdcffffc2bec0cac4e2057c91edfa1716 | refs/heads/master | 2023-01-07T08:13:02.708844 | 2016-05-13T01:34:57 | 2016-05-13T01:34:57 | 58,680,165 | 0 | 1 | null | 2022-12-26T19:59:29 | 2016-05-12T22:02:24 | JavaScript | UTF-8 | Python | false | false | 2,117 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.six.moves.urllib.parse import urljoin
from django.utils.encoding import python_2_unicode_compatible
from djangocms_text_ckeditor.fields import HTMLField
from shop.money.fields import MoneyField
from shop.models.product import BaseProduct, BaseProductManager
from shop.models.defaults.mapping import ProductPage, ProductImage
@python_2_unicode_compatible
class Bottle(BaseProduct):
    """Shop catalog product: a bottle listed on CMS pages at a net unit price."""
    # common product fields
    product_name = models.CharField(max_length=255, verbose_name=_("Product Name"))
    slug = models.SlugField(verbose_name=_("Slug"))
    unit_price = MoneyField(_("Unit price"), decimal_places=3,
        help_text=_("Net price for this product"))
    description = HTMLField(verbose_name=_("Description"),
        help_text=_("Description for the list view of products."))
    # controlling the catalog
    order = models.PositiveIntegerField(verbose_name=_("Sort by"), db_index=True)
    cms_pages = models.ManyToManyField('cms.Page', through=ProductPage,
        help_text=_("Choose list view this product shall appear on."))
    images = models.ManyToManyField('filer.Image', through=ProductImage)
    objects = BaseProductManager()
    # filter expression used to search for a product item using the Select2 widget
    lookup_fields = ('product_name__icontains',)
    class Meta:
        verbose_name = _("Bottle")
        ordering = ('order',)
    def __str__(self):
        return self.product_name
    @property
    def sample_image(self):
        """Return the first associated image, or None if there are none."""
        return self.images.first()
    def get_price(self, request):
        """Return the net unit price; ``request`` is not used in this pricing."""
        return self.unit_price
    def get_absolute_url(self):
        """Build the canonical URL from the most generic CMS page plus slug."""
        # sorting by highest level, so that the canonical URL associates with the
        # most generic category
        cms_page = self.cms_pages.order_by('depth').last()
        if cms_page is None:
            return urljoin('category-not-assigned', self.slug)
        return urljoin(cms_page.get_absolute_url(), self.slug)
| [
"[email protected]"
] | |
07030cbb64db6488b93f8e7f03c975d1d39c099d | df5cd640098a10e754a9552187fc5ad8c50df90c | /colour/examples/algebra/examples_interpolation.py | 4acf509db6a9fd00459d7e4bce455a3a20c6b8ca | [
"BSD-3-Clause"
] | permissive | ofek/colour | d4963c9b77b0d119cf3ef3296dbf5369167472df | 04f4863ef49093a93244c1fedafd1d5e2b1b76da | refs/heads/develop | 2021-07-08T05:33:14.220392 | 2017-09-29T22:34:14 | 2017-09-29T22:34:14 | 105,406,461 | 0 | 0 | null | 2017-09-30T23:06:18 | 2017-09-30T23:06:18 | null | UTF-8 | Python | false | false | 3,265 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Showcases interpolation computations.
"""
import pylab
import colour
from colour.plotting import * # noqa
from colour.utilities.verbose import message_box
message_box('Interpolation Computations')
message_box(('Comparing "Sprague (1880)" and "Cubic Spline" recommended '
'interpolation methods to "Pchip" method.'))
uniform_spd_data = {
340: 0.0000,
360: 0.0000,
380: 0.0000,
400: 0.0641,
420: 0.0645,
440: 0.0562,
460: 0.0537,
480: 0.0559,
500: 0.0651,
520: 0.0705,
540: 0.0772,
560: 0.0870,
580: 0.1128,
600: 0.1360,
620: 0.1511,
640: 0.1688,
660: 0.1996,
680: 0.2397,
700: 0.2852,
720: 0.0000,
740: 0.0000,
760: 0.0000,
780: 0.0000,
800: 0.0000,
820: 0.0000
}
non_uniform_spd_data = {
340.1: 0.0000,
360: 0.0000,
380: 0.0000,
400: 0.0641,
420: 0.0645,
440: 0.0562,
460: 0.0537,
480: 0.0559,
500: 0.0651,
520: 0.0705,
540: 0.0772,
560: 0.0870,
580: 0.1128,
600: 0.1360,
620: 0.1511,
640: 0.1688,
660: 0.1996,
680: 0.2397,
700: 0.2852,
720: 0.0000,
740: 0.0000,
760: 0.0000,
780: 0.0000,
800: 0.0000,
820.9: 0.0000
}
base_spd = colour.SpectralPowerDistribution('Reference', uniform_spd_data)
uniform_interpolated_spd = colour.SpectralPowerDistribution(
'Uniform - Sprague Interpolation', uniform_spd_data)
uniform_pchip_interpolated_spd = colour.SpectralPowerDistribution(
'Uniform - Pchip Interpolation', uniform_spd_data)
non_uniform_interpolated_spd = colour.SpectralPowerDistribution(
'Non Uniform - Cubic Spline Interpolation', non_uniform_spd_data)
uniform_interpolated_spd.interpolate(colour.SpectralShape(interval=1))
uniform_pchip_interpolated_spd.interpolate(
colour.SpectralShape(interval=1), method='Pchip')
non_uniform_interpolated_spd.interpolate(colour.SpectralShape(interval=1))
shape = base_spd.shape
x_limit_min, x_limit_max, y_limit_min, y_limit_max = [], [], [], []
pylab.plot(
base_spd.wavelengths,
base_spd.values,
'ro-',
label=base_spd.name,
linewidth=2)
pylab.plot(
uniform_interpolated_spd.wavelengths,
uniform_interpolated_spd.values,
label=uniform_interpolated_spd.name,
linewidth=2)
pylab.plot(
uniform_pchip_interpolated_spd.wavelengths,
uniform_pchip_interpolated_spd.values,
label=uniform_pchip_interpolated_spd.name,
linewidth=2)
pylab.plot(
non_uniform_interpolated_spd.wavelengths,
non_uniform_interpolated_spd.values,
label=non_uniform_interpolated_spd.name,
linewidth=2)
x_limit_min.append(shape.start)
x_limit_max.append(shape.end)
y_limit_min.append(min(base_spd.values))
y_limit_max.append(max(base_spd.values))
settings = {
'x_label':
'Wavelength $\\lambda$ (nm)',
'y_label':
'Spectral Power Distribution',
'x_tighten':
True,
'legend':
True,
'legend_location':
'upper left',
'x_ticker':
True,
'y_ticker':
True,
'limits': (min(x_limit_min), max(x_limit_max), min(y_limit_min),
max(y_limit_max))
}
boundaries(**settings)
decorate(**settings)
display(**settings)
| [
"[email protected]"
] | |
467775b4bd0bdc529f7af369a772db9776c3f4d4 | 0b793bce2da8c3d09b7956c0672ddbffd46feaed | /atcoder/corp/dwacon6_a.py | 0f60c706492fb0f7e55329255dd53fcbe06cb6d9 | [
"MIT"
] | permissive | knuu/competitive-programming | c6c4e08fb231937d988bdc5a60a8ad6b31b97616 | 16bc68fdaedd6f96ae24310d697585ca8836ab6e | refs/heads/master | 2021-01-17T09:39:02.647688 | 2020-11-07T03:17:22 | 2020-11-07T03:17:22 | 27,886,732 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | py | N = int(input())
titles, times = [], []
for _ in range(N):
s, t = input().split()
titles.append(s)
times.append(int(t))
idx = titles.index(input())
ans = 0
for i in range(idx+1, N):
ans += times[i]
print(ans)
| [
"[email protected]"
] | |
396451adf046ae9a1e9a93d08c731002c02b4a78 | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/response/AntMerchantExpandIndirectOnlineModifyResponse.py | deb575d50e426359ce6993ae14225946249fa464 | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 805 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AntMerchantExpandIndirectOnlineModifyResponse(AlipayResponse):
    """Response object for the AntMerchantExpandIndirectOnlineModify API call."""
    def __init__(self):
        super(AntMerchantExpandIndirectOnlineModifyResponse, self).__init__()
        # Backing field for the sub_merchant_id property.
        self._sub_merchant_id = None
    @property
    def sub_merchant_id(self):
        """Identifier of the modified sub-merchant; None until parsed."""
        return self._sub_merchant_id
    @sub_merchant_id.setter
    def sub_merchant_id(self, value):
        self._sub_merchant_id = value
    def parse_response_content(self, response_content):
        """Parse the raw response and populate sub_merchant_id when present."""
        response = super(AntMerchantExpandIndirectOnlineModifyResponse, self).parse_response_content(response_content)
        if 'sub_merchant_id' in response:
            self.sub_merchant_id = response['sub_merchant_id']
| [
"[email protected]"
] | |
4b664002f3b91925204f95cf5afde92db89ca9f4 | 154e563104144721865a90987db0332bef08a4c3 | /rh_aligner/plotting/__init__.py | 8187d6023c6db7b60a29f9fbf00456387099c256 | [
"MIT"
] | permissive | Rhoana/rh_aligner | 565572d645769053c74a36ddf0f53ecc20d997fe | baab698f6520b9b999bccf423dc510b0c8f4b9bb | refs/heads/master | 2021-01-01T05:29:25.406459 | 2016-05-09T15:34:58 | 2016-05-09T15:34:58 | 56,165,015 | 3 | 3 | null | 2016-05-05T20:00:26 | 2016-04-13T15:43:33 | Python | UTF-8 | Python | false | false | 281 | py | """
Plotting of the stitching and alignment steps library
 - to debug the steps
"""
from .view_pre_pmcc_mfov import view_pre_pmcc_mfov
from .view_post_pmcc_mfov import view_post_pmcc_mfov
# Public API of the plotting sub-package.
__all__ = [
    'view_pre_pmcc_mfov',
    'view_post_pmcc_mfov'
]
| [
"[email protected]"
] | |
25824908e100267109197ad1c04cca8d349a6f10 | 8cf0cf9b71b7c5fbaa150e9893bf461ef661045e | /ownblock/ownblock/apps/parking/models.py | 84c75498d4e8e94365b81a282ee43d877a925a7d | [
"MIT"
] | permissive | danjac/ownblock | 676b27a5aa0d4ce2ac2cd924a632489cd6fc21ee | ac662fb7efb2f04567e2f85638c1250286452611 | refs/heads/master | 2016-08-02T21:51:56.055598 | 2015-05-02T12:54:47 | 2015-05-02T12:54:47 | 34,940,828 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 875 | py | from django.conf import settings
from django.db import models
from django_countries.fields import CountryField
class Vehicle(models.Model):
    """A resident's vehicle, optionally tied to a reserved parking place."""
    description = models.CharField(max_length=100)
    registration_number = models.CharField(max_length=12)
    country = CountryField(default="FI")  # registration country, Finland by default
    resident = models.ForeignKey(settings.AUTH_USER_MODEL)
    reserved_place = models.CharField(max_length=12, blank=True)
    def __str__(self):
        return self.registration_number
    def get_groups(self):
        """Return the owner and their building site's group.

        NOTE(review): presumably used for permission/notification grouping —
        confirm against the callers of get_groups.
        """
        return [self.resident,
                self.resident.apartment.building.site.group]
    def has_permission(self, user, perm):
        """Owners may act on their own vehicle; managers on any vehicle of
        their site.  The ``perm`` argument is currently ignored, and any
        role other than 'resident'/'manager' gets False.
        """
        if user.role == 'resident':
            return user == self.resident
        if user.role == 'manager':
            return (self.resident.apartment.building.site_id ==
                    user.site_id)
        return False
| [
"[email protected]"
] | |
fd6eb4ffc23f7389f26fd2d60442434609b29286 | 5f814192b19721dc9c06e0e9595738b0f8561233 | /OCR/east_text_detection.py | f8bee5a5e3d5d1b813617866d1b192837295a2ef | [] | no_license | irischo/civil_translation | 7b3c5c58e201f74547d5ae21123fdfd9d4bc5e64 | 240638a434957ea25cfac262da93fc23e292f6f2 | refs/heads/master | 2022-11-23T15:32:50.503095 | 2020-07-29T00:40:06 | 2020-07-29T00:40:06 | 283,387,321 | 0 | 0 | null | 2020-07-29T03:21:50 | 2020-07-29T03:21:49 | null | UTF-8 | Python | false | false | 2,829 | py | from imutils.object_detection import non_max_suppression
import numpy as np
import argparse
import time
import cv2

# Command line interface: input image, EAST model path, detection threshold
# and the (multiple-of-32) size the image is resized to before inference.
ap = argparse.ArgumentParser()
ap.add_argument('-i', '--image', type=str, help='path to input image')
ap.add_argument('-east', '--east', type=str, help='path to input EAST text detector')
ap.add_argument('-c', '--min-confidence', type=float, default=0.5, help='minimum probability required to inspect a region')
ap.add_argument('-w', '--width', type=int, default=320, help='resized image width (should be multiple of 32)')
ap.add_argument('-e', '--height', type=int, default=320, help='resized image height (should be multiple of 32)')
args = vars(ap.parse_args())

# load image and keep an untouched copy for drawing the final boxes
image = cv2.imread(args['image'])
orig = image.copy()
(H, W) = image.shape[:2]

# ratios between original and network input size, used to map boxes back later
(newW, newH) = (args['width'], args['height'])
rW = W / float(newW)
rH = H / float(newH)

# resize image to the network input size
image = cv2.resize(image, (newW, newH))
(H, W) = image.shape[:2]

layerNames = [
    'feature_fusion/Conv_7/Sigmoid', # text or not check
    'feature_fusion/concat_3' # image geometry
]

# load pre-trained EAST text detector
print('[INFO] loading EAST text detector ...')
net = cv2.dnn.readNet(args['east'])
# Bug fix: the keyword is swapRB (swap the R and B channels so BGR input is
# fed to the model as RGB); the original 'swapPB' raised a TypeError.
blob = cv2.dnn.blobFromImage(image, 1.0, (W, H), (123.68, 116.78, 103.94), swapRB=True, crop=False)
start = time.time()
net.setInput(blob)
(scores, geometry) = net.forward(layerNames)
end = time.time()

# show timing information on text prediction
print("[INFO] text detection took {:.6f} seconds".format(end - start))

(numRows, numCols) = scores.shape[2:4]
rects = []
confidences = []

# Decode the EAST output: each (x, y) cell maps to a 4x4 pixel region of the
# input; geometry rows 0-3 hold distances to the box edges, row 4 the angle.
for y in range(0, numRows):
    scoresData = scores[0, 0, y]
    xData0 = geometry[0, 0, y]
    xData1 = geometry[0, 1, y]
    xData2 = geometry[0, 2, y]
    xData3 = geometry[0, 3, y]
    anglesData = geometry[0, 4, y]
    for x in range(0, numCols):
        # skip weak detections below the confidence threshold
        if scoresData[x] < args['min_confidence']:
            continue
        (offsetX, offsetY) = (x * 4.0, y * 4.0)
        angle = anglesData[x]
        cos = np.cos(angle)
        sin = np.sin(angle)
        h = xData0[x] + xData2[x]
        w = xData1[x] + xData3[x]
        # rotate the edge distances back into image coordinates
        endX = int(offsetX + (cos * xData1[x]) + (sin * xData2[x]))
        endY = int(offsetY - (sin * xData1[x]) + (cos * xData2[x]))
        startX = int(endX - w)
        startY = int(endY - h)
        rects.append((startX, startY, endX, endY))
        confidences.append(scoresData[x])

# collapse overlapping candidate boxes
boxes = non_max_suppression(np.array(rects), probs=confidences)
for (startX, startY, endX, endY) in boxes:
    # scale the box back to the original image size before drawing
    startX = int(startX * rW)
    startY = int(startY * rH)
    endX = int(endX * rW)
    endY = int(endY * rH)
    cv2.rectangle(orig, (startX, startY), (endX, endY), (0, 255, 0), 2)

cv2.imshow('Text Detection', orig)
cv2.waitKey(0)
| [
"[email protected]"
] | |
564f224574f406c1a966ab5582a316627e5a9ae1 | 2cfa657fd119a23de2a5c2ae6d55e6d2516bae2d | /test/functional/wallet_keypool_topup.py | 1c1aa4fe3a776fdc70d840768a3b9deacdbccf53 | [
"MIT"
] | permissive | vivuscoin/vivuscoin | 640b10ae3a72c03b501e03b07caae09ce6c87c81 | ba0db89712234bf68b2d6b63ef2c420d65c7c25d | refs/heads/master | 2023-05-07T06:26:26.241247 | 2021-05-25T03:54:32 | 2021-05-25T03:54:32 | 362,198,076 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,779 | py | #!/usr/bin/env python3
# Copyright (c) 2017-2018 The Bitcoin Core developers
# Copyright (c) 2021 The Vivuscoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test HD Wallet keypool restore function.
Two nodes. Node1 is under test. Node0 is providing transactions and generating blocks.
- Start node1, shutdown and backup wallet.
- Generate 110 keys (enough to drain the keypool). Store key 90 (in the initial keypool) and key 110 (beyond the initial keypool). Send funds to key 90 and key 110.
- Stop node1, clear the datadir, move wallet file back into the datadir and restart node1.
- connect node1 to node0. Verify that they sync and node1 receives its funds."""
import os
import shutil
from test_framework.test_framework import VivuscoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes_bi,
sync_blocks,
)
class KeypoolRestoreTest(VivuscoinTestFramework):
    """Restoring an old wallet backup must top up the HD keypool so that
    funds sent to keys beyond the original keypool are still found."""
    def set_test_params(self):
        # Node 0 mines and funds; nodes 1-3 each test one address type.
        self.setup_clean_chain = True
        self.num_nodes = 4
        self.extra_args = [[], ['-keypool=100'], ['-keypool=100'], ['-keypool=100']]
    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()
    def run_test(self):
        # NOTE(review): both paths are fixed to node 1, yet the restore loop
        # below restarts nodes 1, 2 and 3 — for idx 2/3 node 1's wallet file
        # is overwritten while that node's own wallet is untouched; confirm
        # this is intended (compare with the upstream Bitcoin Core test).
        wallet_path = os.path.join(self.nodes[1].datadir, "regtest", "wallets", "wallet.dat")
        wallet_backup_path = os.path.join(self.nodes[1].datadir, "wallet.bak")
        self.nodes[0].generate(101)
        self.log.info("Make backup of wallet")
        self.stop_node(1)
        shutil.copyfile(wallet_path, wallet_backup_path)
        self.start_node(1, self.extra_args[1])
        connect_nodes_bi(self.nodes, 0, 1)
        connect_nodes_bi(self.nodes, 0, 2)
        connect_nodes_bi(self.nodes, 0, 3)
        for i, output_type in enumerate(["legacy", "p2sh-segwit", "bech32"]):
            self.log.info("Generate keys for wallet with address type: {}".format(output_type))
            idx = i+1
            # Key 90 is inside the initial keypool of 100; key 110 is beyond it.
            for _ in range(90):
                addr_oldpool = self.nodes[idx].getnewaddress(address_type=output_type)
            for _ in range(20):
                addr_extpool = self.nodes[idx].getnewaddress(address_type=output_type)
            # Make sure we're creating the outputs we expect
            address_details = self.nodes[idx].validateaddress(addr_extpool)
            if i == 0:
                assert(not address_details["isscript"] and not address_details["iswitness"])
            elif i == 1:
                assert(address_details["isscript"] and not address_details["iswitness"])
            else:
                assert(not address_details["isscript"] and address_details["iswitness"])
            self.log.info("Send funds to wallet")
            self.nodes[0].sendtoaddress(addr_oldpool, 10)
            self.nodes[0].generate(1)
            self.nodes[0].sendtoaddress(addr_extpool, 5)
            self.nodes[0].generate(1)
            sync_blocks(self.nodes)
            self.log.info("Restart node with wallet backup")
            self.stop_node(idx)
            shutil.copyfile(wallet_backup_path, wallet_path)
            self.start_node(idx, self.extra_args[idx])
            connect_nodes_bi(self.nodes, 0, idx)
            self.sync_all()
            self.log.info("Verify keypool is restored and balance is correct")
            assert_equal(self.nodes[idx].getbalance(), 15)
            assert_equal(self.nodes[idx].listtransactions()[0]['category'], "receive")
            # Check that we have marked all keys up to the used keypool key as used
            assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress())['hdkeypath'], "m/0'/0'/110'")
if __name__ == '__main__':
    KeypoolRestoreTest().main()
| [
"[email protected]"
] | |
12554a6f358810f3d7bcf732d99807639d1b65bf | 22ebdd6881730a9474ede8e8167c615990c4e275 | /prob17a.py | e5cae5301008b8a21864cb95ac76154a72222942 | [] | no_license | MMohan1/eueler | a96a465b265334b03645f2e2bb66c85395c54e75 | 05a88f1c9b41fbc3d6bcd95b38b83a6510b3b50a | refs/heads/master | 2021-01-18T15:14:35.320214 | 2015-02-02T11:02:06 | 2015-02-02T11:02:06 | 15,935,991 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 840 | py | def prob17():
def prob17():
    """Project Euler problem 17: count the letters used when writing the
    numbers 1..1000 out in English words (spaces/hyphens not counted).

    Bug fixes vs. the original: the misspellings 'fourty' and 'ninty' are
    corrected to 'forty' and 'ninety' (they skewed the letter counts), and
    the Python 2 print statements are replaced by print() calls.

    Returns:
        int: the total number of letters, 21124.
    """
    ones = {0: '', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
            6: 'six', 7: 'seven', 8: 'eight', 9: 'nine', 10: 'ten'}
    teens = {11: 'eleven', 12: 'twelve', 13: 'thirteen', 14: 'fourteen',
             15: 'fifteen', 16: 'sixteen', 17: 'seventeen', 18: 'eighteen',
             19: 'nineteen'}
    tens = {0: '', 1: 'ten', 2: 'twenty', 3: 'thirty', 4: 'forty',
            5: 'fifty', 6: 'sixty', 7: 'seventy', 8: 'eighty', 9: 'ninety'}
    # Spell out 1..99; these words are reused inside every hundreds block.
    words = []
    for i in range(1, 100):
        if i <= 10:
            words.append(ones[i])
        elif i < 20:
            words.append(teens[i])
        elif i % 10 == 0:
            words.append(tens[i // 10])
        else:
            words.append(tens[i // 10] + ones[i % 10])
    p = sum(len(w) for w in words)  # letters used by 1..99 (= 854)
    # 100..999, with hundred prefixes grouped by length:
    #   one/two/six     -> "Xhundredand" = 13 letters,
    #   four/five/nine  -> 14, three/seven/eight -> 15,
    # each covering the 99 numbers X01..X99 plus the 1..99 words (p).
    # The trailing "+ 99" is the sum of len("Xhundred") for the nine exact
    # hundreds (3*10 + 3*11 + 3*12), which take no "and".
    k = 3*((13*99)+p) + 3*((14*99)+p) + 3*((15*99)+p) + len('onethousand') + p + 99
    print(k)
    return k
if __name__ == '__main__':
    prob17()
| [
"[email protected]"
] | |
dada702324b30a4d4a00d067c7b3c97d8b05129b | b8ef1a5cd3856a8e9134c3313a4e23522f199df7 | /Baekjoon/1966_프린터 큐/1966_프린터 큐.py | 73d38dbd9d6bdf4902738765108954a7e7151128 | [] | no_license | scl2589/Algorithm_problem_solving | 910623d9675ae0219320abfd1fefc7d576027544 | 80db697cdd0180a7d4dbcfae4944d4a54191bddf | refs/heads/master | 2023-07-29T10:56:38.225206 | 2021-09-11T13:50:46 | 2021-09-11T13:50:46 | 235,363,353 | 0 | 0 | null | 2021-03-04T15:39:41 | 2020-01-21T14:36:41 | Python | UTF-8 | Python | false | false | 548 | py | from collections import deque
tc = int(input())
for _ in range(tc):
    # N = number of documents (read to consume the input line, value unused),
    # M = index of the document whose print order we must report.
    N, M = map(int, input().split())
    impt = list(map(int, input().split()))
    # Queue of (original index, priority) pairs.
    q = deque(enumerate(impt))
    printed = 0
    while True:
        # Performance fix: an O(n) max scan instead of sorting the whole
        # queue (O(n log n)) on every iteration.
        max_num = max(prio for _, prio in q)
        idx, prio = q.popleft()
        if prio == max_num:
            # Highest-priority document: it gets printed now.
            printed += 1
            if idx == M:
                break
        else:
            # Something more important is waiting: requeue at the back.
            q.append((idx, prio))
    # `printed` already includes the tracked document itself.
    print(printed)
| [
"[email protected]"
] | |
4e6dc77e570b5419eef0fc74fd16710afdfd3235 | 190d03cf370844548b9e8c89952dfbaec4d0c5c8 | /p103.py | 467aee99fa0ff340b0a00d481a047ab36a7d0d52 | [] | no_license | alainlou/leetcode | 446d101a9fd2f9eaa2229252e5909e7df36b4a74 | fe500bcb067be59aa048259e3860e9da6f98344d | refs/heads/master | 2022-10-16T12:20:44.726963 | 2022-09-18T15:29:05 | 2022-09-18T15:29:05 | 178,775,702 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 623 | py | from DS.TreeNode import TreeNode
class Solution:
    def zigzagLevelOrder(self, root: "TreeNode") -> "List[List[int]]":
        """Return the zigzag (spiral) level-order traversal of a binary tree:
        level 0 left-to-right, level 1 right-to-left, and so on.

        Fixes vs. the original: the annotations are string literals so the
        class imports cleanly without LeetCode's implicit ``List``/``TreeNode``
        names, and the O(n) ``list.pop(0)`` is replaced by an O(1) deque pop.
        """
        from collections import deque  # local import keeps the module import-safe
        if root is None:
            return []
        levels = []
        queue = deque([(root, 0)])
        while queue:
            node, depth = queue.popleft()
            # First node of a new level opens a fresh bucket.
            if len(levels) <= depth:
                levels.append([])
            levels[depth].append(node.val)
            if node.left:
                queue.append((node.left, depth + 1))
            if node.right:
                queue.append((node.right, depth + 1))
        # Reverse every odd level in place to produce the zigzag order.
        for i in range(1, len(levels), 2):
            levels[i].reverse()
        return levels
| [
"[email protected]"
] | |
5df906375ee0c7d24ede8dd570122ce0cbdd1251 | 9bdc5bd0b6195761fbceed17c0725bc48a5941a1 | /testing/keras_taylor_1D.py | f24a460e11be29c65a55abbe497af20fe014f122 | [] | no_license | lapsintra/htt-ml | bc6bbb12eda4a3f0abbc5c0db13940a31b667a08 | ce07cad6fcc8625b1595157de6486759b74f6d62 | refs/heads/master | 2020-04-05T16:29:29.858916 | 2018-12-04T19:32:10 | 2018-12-04T19:32:10 | 157,015,043 | 0 | 0 | null | 2018-11-10T19:38:56 | 2018-11-10T19:38:56 | null | UTF-8 | Python | false | false | 7,871 | py | #!/usr/bin/env python
import ROOT
ROOT.PyConfig.IgnoreCommandLineOptions = True # disable ROOT internal argument parser
import argparse
from array import array
import yaml
import pickle
import numpy as np
import os
import sys
import matplotlib as mpl
mpl.use('Agg')
mpl.rcParams['font.size'] = 16
import matplotlib.pyplot as plt
from matplotlib import cm
from keras.models import load_model
import tensorflow as tf
from tensorflow_derivative.inputs import Inputs
from tensorflow_derivative.outputs import Outputs
from tensorflow_derivative.derivatives import Derivatives
import logging
# Module-level logger: DEBUG-and-above records go to stderr (StreamHandler
# default) formatted as "<name> - <level> - <message>".
logger = logging.getLogger("keras_taylor_1D")
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
formatter = logging.Formatter("%(name)s - %(levelname)s - %(message)s")
handler.setFormatter(formatter)
logger.addHandler(handler)
def parse_arguments():
    """Parse the command line options of this script.

    Returns:
        argparse.Namespace with ``config_training``, ``config_testing``,
        ``fold``, ``no_abs`` and ``normalize`` attributes.
    """
    cli = argparse.ArgumentParser(description="Produce confusion matrice")
    cli.add_argument("config_training", help="Path to training config file")
    cli.add_argument("config_testing", help="Path to testing config file")
    cli.add_argument("fold", type=int, help="Trained model to be tested.")
    cli.add_argument(
        "--no-abs", action="store_true", default=False,
        help="Do not use abs for metric.")
    cli.add_argument(
        "--normalize", action="store_true", default=False,
        help="Normalize rows.")
    return cli.parse_args()
def parse_config(filename):
    """Read a YAML configuration file and return its parsed content.

    Args:
        filename: Path to the YAML file.

    Returns:
        The deserialized YAML document (typically a dict).
    """
    logger.debug("Load config %s.", filename)
    # Fix: close the file handle instead of leaking the inline open().
    # NOTE(review): yaml.load without an explicit Loader can construct
    # arbitrary Python objects; switch to yaml.safe_load if these configs
    # ever come from an untrusted source.
    with open(filename, "r") as config_file:
        return yaml.load(config_file)
def main(args, config_test, config_train):
    """Compute the per-class, event-weighted first-order Taylor coefficients
    of the trained network w.r.t. its input variables and plot them as a
    (classes x variables) matrix saved to ``fold<N>_keras_taylor_1D.png``.
    """
    # Load preprocessing
    path = os.path.join(config_train["output_path"],
                        config_test["preprocessing"][args.fold])
    logger.info("Load preprocessing %s.", path)
    # NOTE(review): the file handle from the inline open() is never closed.
    preprocessing = pickle.load(open(path, "rb"))
    # Load Keras model
    path = os.path.join(config_train["output_path"],
                        config_test["model"][args.fold])
    logger.info("Load keras model %s.", path)
    model_keras = load_model(path)
    # Get TensorFlow graph
    inputs = Inputs(config_train["variables"])
    # NOTE(review): the bare excepts below hide the real import/lookup error;
    # consider narrowing them and chaining the original exception.
    try:
        sys.path.append("htt-ml/training")
        import keras_models
    except:
        logger.fatal("Failed to import Keras models.")
        raise Exception
    try:
        name_keras_model = config_train["model"]["name"]
        model_tensorflow_impl = getattr(
            keras_models, config_train["model"]["name"] + "_tensorflow")
    except:
        logger.fatal(
            "Failed to load TensorFlow version of Keras model {}.".format(
                name_keras_model))
        raise Exception
    model_tensorflow = model_tensorflow_impl(inputs.placeholders, model_keras)
    outputs = Outputs(model_tensorflow, config_train["classes"])
    sess = tf.Session()
    sess.run(tf.global_variables_initializer())
    # Get operations for first-order derivatives
    deriv_ops = {}
    derivatives = Derivatives(inputs, outputs)
    for class_ in config_train["classes"]:
        deriv_ops[class_] = []
        for variable in config_train["variables"]:
            deriv_ops[class_].append(derivatives.get(class_, [variable]))
    # Loop over testing dataset
    # Fold 0 was trained on dataset 1 and vice versa, hence the (1, 0) swap.
    path = os.path.join(config_train["datasets"][(1, 0)[args.fold]])
    logger.info("Loop over test dataset %s to get model response.", path)
    file_ = ROOT.TFile(path)
    mean_abs_deriv = {}
    for i_class, class_ in enumerate(config_train["classes"]):
        logger.debug("Process class %s.", class_)
        tree = file_.Get(class_)
        if tree == None:
            logger.fatal("Tree %s does not exist.", class_)
            raise Exception
        # Bind one array buffer per input variable to the tree branches.
        values = []
        for variable in config_train["variables"]:
            typename = tree.GetLeaf(variable).GetTypeName()
            if typename == "Float_t":
                values.append(array("f", [-999]))
            elif typename == "Int_t":
                values.append(array("i", [-999]))
            else:
                logger.fatal("Variable {} has unknown type {}.".format(variable, typename))
                raise Exception
            tree.SetBranchAddress(variable, values[-1])
        # NOTE(review): this inspects the leaf of the *last* input variable,
        # yet the message talks about the weight branch — it looks like it
        # should check tree.GetLeaf(config_test["weight_branch"]); confirm.
        if tree.GetLeaf(variable).GetTypeName() != "Float_t":
            logger.fatal("Weight branch has unkown type.")
            raise Exception
        weight = array("f", [-999])
        tree.SetBranchAddress(config_test["weight_branch"], weight)
        deriv_class = np.zeros((tree.GetEntries(),
                                len(config_train["variables"])))
        weights = np.zeros((tree.GetEntries()))
        for i_event in range(tree.GetEntries()):
            tree.GetEntry(i_event)
            # Preprocessing
            values_stacked = np.hstack(values).reshape(1, len(values))
            values_preprocessed = preprocessing.transform(values_stacked)
            # Keras inference
            response = model_keras.predict(values_preprocessed)
            response_keras = np.squeeze(response)
            # Tensorflow inference
            response = sess.run(
                model_tensorflow,
                feed_dict={
                    inputs.placeholders: values_preprocessed
                })
            response_tensorflow = np.squeeze(response)
            # Check compatibility
            # Guard against the hand-written TF graph diverging from Keras.
            mean_error = np.mean(np.abs(response_keras - response_tensorflow))
            if mean_error > 1e-5:
                logger.fatal(
                    "Found mean error of {} between Keras and TensorFlow output for event {}.".
                    format(mean_error, i_event))
                raise Exception
            # Calculate first-order derivatives
            deriv_values = sess.run(
                deriv_ops[class_],
                feed_dict={
                    inputs.placeholders: values_preprocessed
                })
            deriv_values = np.squeeze(deriv_values)
            deriv_class[i_event, :] = deriv_values
            # Store weight
            weights[i_event] = weight[0]
        # Event-weighted average per variable; --no-abs keeps signed values.
        if args.no_abs:
            mean_abs_deriv[class_] = np.average((deriv_class), weights=weights, axis=0)
        else:
            mean_abs_deriv[class_] = np.average(np.abs(deriv_class), weights=weights, axis=0)
    # Normalize rows
    classes = config_train["classes"]
    matrix = np.vstack([mean_abs_deriv[class_] for class_ in classes])
    if args.normalize:
        for i_class, class_ in enumerate(classes):
            matrix[i_class, :] = matrix[i_class, :] / np.sum(
                matrix[i_class, :])
    # Plotting
    variables = config_train["variables"]
    plt.figure(0, figsize=(len(variables), len(classes)))
    axis = plt.gca()
    # Annotate every cell with its value on top of the colour mesh.
    for i in range(matrix.shape[0]):
        for j in range(matrix.shape[1]):
            axis.text(
                j + 0.5,
                i + 0.5,
                '{:.2f}'.format(matrix[i, j]),
                ha='center',
                va='center')
    q = plt.pcolormesh(matrix, cmap='Wistia')
    #cbar = plt.colorbar(q)
    #cbar.set_label("mean(abs(Taylor coefficients))", rotation=270, labelpad=20)
    plt.xticks(
        np.array(range(len(variables))) + 0.5, variables, rotation='vertical')
    plt.yticks(
        np.array(range(len(classes))) + 0.5, classes, rotation='horizontal')
    plt.xlim(0, len(config_train["variables"]))
    plt.ylim(0, len(config_train["classes"]))
    output_path = os.path.join(config_train["output_path"],
                               "fold{}_keras_taylor_1D.png".format(args.fold))
    logger.info("Save plot to {}.".format(output_path))
    plt.savefig(output_path, bbox_inches='tight')
if __name__ == "__main__":
    # Entry point: parse CLI arguments and the testing/training config files,
    # then run the Taylor-coefficient analysis implemented in main().
    args = parse_arguments()
    config_test = parse_config(args.config_testing)
    config_train = parse_config(args.config_training)
    main(args, config_test, config_train)
| [
"[email protected]"
] | |
fc07829f755d7e6cdcfbb45f1595dfd39618bdaa | 6d69b249a81e076d79787dd08eb8957908052052 | /libs/parse/sections/grouper_mixins/blocks.py | 154423ab5e2c3ebe6544db079d4af4ebccbedda5 | [] | no_license | 2vitalik/wiktionary | 02ee1f1327c3b82fc7b4d7da12083b1431b1eb8b | 8edae2f7dcf9089084c5ce7033c4fb0b454f4dfa | refs/heads/master | 2023-02-06T11:28:41.554604 | 2023-02-05T22:49:01 | 2023-02-05T22:49:01 | 121,025,447 | 7 | 2 | null | 2021-10-13T17:36:32 | 2018-02-10T15:06:24 | Lua | UTF-8 | Python | false | false | 486 | py | from libs.parse.groupers.sections.blocks.any_blocks import AnyBlocksGrouper
from libs.parse.groupers.sections.blocks.blocks import BlocksGrouper
from libs.parse.sections.grouper_mixins.sub_blocks import SubBlocksGroupersMixin
from libs.parse.utils.decorators import parsed
class BlocksGroupersMixin(SubBlocksGroupersMixin):
    """Mixin adding block-level grouper accessors to a section."""
    @property
    @parsed
    def blocks(self):
        """Return a BlocksGrouper built over this section."""
        grouper = BlocksGrouper(self)
        return grouper
    @parsed
    def any_blocks(self):
        """Return an AnyBlocksGrouper built over this section."""
        grouper = AnyBlocksGrouper(self)
        return grouper
| [
"[email protected]"
] | |
def cal(w):
    """Return the w-th term of the sequence f(1)=2, f(2)=3, f(w)=f(w-1)+f(w-2).

    The original branched on w==1/2/3 separately and, for w < 1, fell into
    the loop without ever binding ``temp`` (NameError).  A single loop with
    tuple-swapping covers every w >= 1 uniformly:
        w=1 -> 2, w=2 -> 3, w=3 -> 5, w=4 -> 8, ...
    """
    a, b = 2, 3
    for _ in range(w - 1):
        a, b = b, a + b
    return a
# Read the number of test cases; for each width w print 2**w minus cal(w).
num_cases = int(input())
for _ in range(num_cases):
    w = int(input())
    print(2**w-cal(w))
"[email protected]"
] | |
e1267a54f015e66aaf57df060a0ebb302d36b67e | 8d4f26bccc3b016cf45e8270df617cea73d1a741 | /utils/transforms.py | 4a3b38f5fe3802f8719e83a981a8f9c1740e3a2c | [] | no_license | krylatov-pavel/aibolit-ECG | 3b6e4fc8d87ada6a615038c7fb94048570af2d43 | 27bad875981547ea93ac0088518eb29149078988 | refs/heads/master | 2022-12-26T05:49:30.827061 | 2019-08-19T10:47:20 | 2019-08-19T10:47:20 | 191,343,111 | 1 | 0 | null | 2022-12-08T05:56:08 | 2019-06-11T09:53:21 | Python | UTF-8 | Python | false | false | 631 | py | import torch
from torchvision import transforms
def squeeze(x):
    """Drop the leading axis (dim 0) of tensor ``x`` when it has size 1."""
    return x.squeeze(dim=0)
def clip_fn(min, max):
    """Build a closure that clamps its tensor argument to [min, max].

    NOTE(review): the parameters shadow the ``min``/``max`` builtins; the
    names are kept so existing keyword callers keep working.
    """
    def clip(x):
        return torch.clamp(x, min, max)
    return clip
def scale_fn(min, max, a, b):
    """Build a closure mapping values linearly from [min, max] onto [a, b].

    NOTE(review): parameter names shadow the ``min``/``max`` builtins; kept
    for interface compatibility.
    """
    def scale(x):
        rescaled = (b - a) * (x - min) / (max - min)
        return rescaled + a
    return scale
def get_transform():
    """Build the preprocessing pipeline: convert to tensor, clamp values to
    [-19, 21], rescale them onto [0, 5], then drop the leading singleton
    dimension."""
    steps = [
        transforms.ToTensor(),
        transforms.Lambda(clip_fn(-19, 21)),
        transforms.Lambda(scale_fn(-19, 21, 0, 5)),
        transforms.Lambda(squeeze),
    ]
    return transforms.Compose(steps)
"[email protected]"
] | |
8abbdd180f33166add8aa0e2afc8656a3e61eb68 | 198dd2fd5b2aa27b950bd5844c97a1ebdbd3af17 | /dephell/repositories/_local.py | 90ceb7d1f9c1045a3bc1c9a026ebe0a9eea2cb71 | [
"MIT"
] | permissive | espdev/dephell | 68411b20c1830836dcea0eec96a8bd15e95171d5 | 17d5604e7b443b4d58bffc635a139adb49431efc | refs/heads/master | 2020-11-26T01:05:07.580285 | 2019-12-20T14:29:07 | 2019-12-20T14:29:07 | 228,915,765 | 0 | 0 | MIT | 2019-12-18T20:24:21 | 2019-12-18T20:24:20 | null | UTF-8 | Python | false | false | 4,172 | py | # built-in
from datetime import datetime
from pathlib import Path
from typing import Optional, Tuple, Union
# app
from ..cache import RequirementsCache
from ..config import Config
from ..constants import FILES
from ..models.release import Release
from ._warehouse import WarehouseLocalRepo
from .base import Interface
class LocalRepo(Interface):
    """Dependency repository backed by a local directory (or a single file).

    Metadata and dependencies are resolved by inspecting the path itself:
    built distributions in ``dist/``, wheel/sdist archives, ``*.egg-info``,
    a dephell section in ``pyproject.toml``, or plain dependency files.
    """
    def __init__(self, path: Union[Path, str]):
        # Normalize: accept a plain string and convert it to a Path.
        if type(path) is str:
            path = Path(path)
        self.path = path
    def get_releases(self, dep) -> Tuple[Release, ...]:
        """Collect releases for *dep*.

        Includes any releases found under ``<path>/dist`` plus one synthetic
        release for the working tree itself (version taken from the project
        metadata, timestamp from the path's mtime).  The tuple is reversed,
        so the working-tree release comes first.
        """
        releases = []
        dist_path = (self.path / 'dist')
        if dist_path.exists():
            # Re-use the warehouse logic for archives already built in dist/.
            repo = WarehouseLocalRepo(name='tmp', path=dist_path)
            releases = list(repo.get_releases(dep=dep))
        root = self.get_root(name=dep.name, version='0.0.0')
        self.update_dep_from_root(dep=dep, root=root)
        releases.append(Release(
            raw_name=root.raw_name,
            version=root.version,
            time=datetime.fromtimestamp(self.path.stat().st_mtime),
        ))
        return tuple(reversed(releases))
    async def get_dependencies(self, name: str, version: str, extra: Optional[str] = None) -> tuple:
        """Return the project's dependencies, optionally filtered by *extra*.

        Results are cached per (name, version).  NOTE(review): a cache hit
        returns the stored deps without applying the *extra* filter --
        confirm that is intended.
        """
        cache = RequirementsCache('local', 'deps', name, str(version))
        deps = cache.load()
        if deps:
            return deps
        root = self.get_root(name=name, version=version)
        deps = root.dependencies
        if extra:
            deps = tuple(dep for dep in deps if extra in dep.envs)
        cache.dump(root=root)
        return deps
    def get_root(self, name: str, version: str):
        """Load the project metadata ("root") from the path.

        Lookup order: a single file parsed by the first capable converter;
        a matching wheel/sdist inside the directory; ``<name>.egg-info``;
        a dephell section in ``pyproject.toml``; finally any known
        dependencies file.  Raises LookupError when nothing matches.
        """
        # Local import -- presumably avoids a circular dependency with ..converters.
        from ..converters import EggInfoConverter, SDistConverter, WheelConverter, CONVERTERS
        if not self.path.exists():
            raise FileNotFoundError(str(self.path))
        # load from file
        if self.path.is_file():
            for converter in CONVERTERS.values():
                if converter.can_parse(path=self.path):
                    return converter.load(path=self.path)
            raise LookupError('cannot find loader for file ' + str(self.path))
        # get from wheel or sdist
        patterns = (
            ('-*-*-*.whl', WheelConverter()),
            ('.tar.gz', SDistConverter()),
            ('.tgz', SDistConverter()),
        )
        for suffix, converter in patterns:
            # NOTE(review): the dash->underscore normalization is applied to
            # sdist names as well as wheels -- verify sdists on disk use it.
            paths = tuple(self.path.glob('**/{name}-{version}{suffix}'.format(
                name=name.replace('-', '_'),
                version=str(version),
                suffix=suffix,
            )))
            if paths:
                # Prefer the shallowest matching archive.
                path = min(paths, key=lambda path: len(path.parts))
                return converter.load(path=path)
        # read from egg-info
        path = self.path / (name + '.egg-info')
        if path.exists():
            return EggInfoConverter().load(path=path)
        # read from dephell config
        path = self.path / 'pyproject.toml'
        if path.exists():
            config = Config().attach_file(path=path, env='main')
            if config is not None:
                section = config.get('to') or config.get('from')
                if section and 'path' in section and 'format' in section:
                    converter = CONVERTERS[section['format']]
                    path = self.path.joinpath(section['path'])
                    return converter.load(path)
        # get from dependencies file
        for fname in FILES:
            path = self.path / fname
            if not path.exists():
                continue
            for converter in CONVERTERS.values():
                if converter.can_parse(path=path):
                    return converter.load(path=path)
        raise LookupError('cannot find dependencies in ' + str(self.path))
    @staticmethod
    def update_dep_from_root(dep, root) -> None:
        """Copy metadata fields from *root* onto *dep*, but only where *dep*
        does not already hold a truthy value."""
        if not dep.description:
            dep.description = root.description
        if not dep.authors:
            dep.authors = root.authors
        if not dep.links:
            dep.links = root.links
        if not dep.classifiers:
            dep.classifiers = root.classifiers
        if not dep.license:
            dep.license = root.license
| [
"[email protected]"
] | |
# Parameter record for one schedulability-experiment trial; the opening line
# of this bare dict literal was fused into extraction junk, making the file
# invalid Python -- restored here with values unchanged.
{'cpus': 4,
 'duration': 30,
 'final_util': '3.570619',
 'max_util': '3.5',
 'periods': 'harmonic-2',
 'release_master': False,
 'res_distr': '1',
 'res_nmb': '4',
 'res_weight': '0.04',
 'scheduler': 'RUN',
 'trial': 0,
 'utils': 'uni-medium-3'}
| [
"[email protected]"
] | |
0e056bd4bb9433bc9d9431ddbca1ea8006b3f319 | dcb57598b4b7f9fb7a25f847574b227824b5420c | /tests/integration_tests/test_features.py | 53172ef02b0bf7875a94cad0a5af4b9fa26394f8 | [
"MIT"
] | permissive | netor27/features-webapp | 473a0f393bea5d30d654d1880869fe8daebe4c5b | 801d582975de7734c90bf0a4c95f404cbf305e86 | refs/heads/master | 2021-05-11T07:36:06.494569 | 2018-05-08T17:54:15 | 2018-05-08T17:54:15 | 118,023,763 | 0 | 2 | MIT | 2018-05-08T17:54:16 | 2018-01-18T18:48:42 | Python | UTF-8 | Python | false | false | 17,571 | py | import pytest
from unittest import TestCase
from flask import url_for, json
from datetime import date
from web.server import create_app
from web.db import db
from web.status import status
from web.models import Feature
from tests.integration_tests.post_helpers import PostHelper
class FeaturesTests(TestCase):
    """Integration tests for the features API.

    Each test runs against a fresh app and database created by the
    ``transact`` fixture and drives the HTTP API through ``PostHelper``.
    """
    @pytest.fixture(autouse=True)
    def transact(self, request, configfile, waitForDb):
        """Per-test setup/teardown: build the app, push an app context and a
        clean schema, yield to the test, then tear everything down."""
        self.app = create_app(configfile, waitForDb)
        self.test_client = self.app.test_client()
        self.app_context = self.app.app_context()
        self.app_context.push()
        self.test_user_name = 'testuserusers'
        self.test_user_password = 'T3s!p4s5w0RDd12#'
        self.ph = PostHelper(self.test_client, self.test_user_name, self.test_user_password)
        db.create_all()
        yield
        db.session.remove()
        db.drop_all()
        self.app_context.pop()
    def test_create_and_retrieve_feature(self):
        """
        Ensure we can create a new Feature and then retrieve it
        """
        # create our user so we can authenticate
        res = self.ph.create_user(self.test_user_name, self.test_user_password)
        self.assertEqual(res.status_code, status.HTTP_201_CREATED,
                         res.get_data(as_text=True))
        # create a new feature, assert we receive a 201 http code and and assert there's only one Feature in the db
        title = 'New Feature Title'
        description = 'Description ' * 10
        target_date = date(2018, 6, 15)
        priority = 1
        client = 'Client 1'
        area = 'Billing'
        post_res = self.ph.create_feature(
            title, description, target_date, priority, client, area)
        self.assertEqual(
            post_res.status_code, status.HTTP_201_CREATED, post_res.get_data(as_text=True))
        self.assertEqual(Feature.query.count(), 1)
        # check that the returned values in the post response are correct
        post_res_data = json.loads(post_res.get_data(as_text=True))
        self.assertEqual(post_res_data['title'], title)
        self.assertEqual(post_res_data['description'], description)
        self.assertEqual(post_res_data['target_date'], target_date.isoformat())
        self.assertEqual(post_res_data['client_priority'], priority)
        self.assertEqual(post_res_data['client']['name'], client)
        self.assertEqual(post_res_data['area']['name'], area)
        # get the new feature url, retrieve it and assert the correct values
        feature_url = post_res_data['url']
        res = self.test_client.get(
            feature_url,
            headers=self.ph.get_authentication_headers())
        res_data = json.loads(res.get_data(as_text=True))
        self.assertEqual(res.status_code, status.HTTP_200_OK,
                         res.get_data(as_text=True))
        self.assertEqual(res_data['title'], title)
        self.assertEqual(res_data['description'], description)
        self.assertEqual(res_data['target_date'], target_date.isoformat())
        self.assertEqual(res_data['client_priority'], priority)
        self.assertEqual(res_data['client']['name'], client)
        self.assertEqual(res_data['area']['name'], area)
    def test_retrieve_features_list(self):
        """
        Ensure we can retrieve the features list
        """
        # create our user so we can authenticate
        res = self.ph.create_user(self.test_user_name, self.test_user_password)
        self.assertEqual(res.status_code, status.HTTP_201_CREATED,
                         res.get_data(as_text=True))
        # create 4 features and assert the response
        for i in range(1, 5):
            title = 'New Feature Title {}'.format(i)
            description = 'Description {}'.format(i)
            target_date = date(2018, 6, i)
            priority = i
            client = "Client"
            area = "Billing"
            post_res = self.ph.create_feature(
                title, description, target_date, priority, client, area)
            self.assertEqual(
                post_res.status_code, status.HTTP_201_CREATED, post_res.get_data(as_text=True))
        # assert we only have this 4
        self.assertEqual(Feature.query.count(), 4)
        # retrieve the complete list of features, it should return only the 4 we created
        url = url_for('api.featurelistresource', _external=True)
        res = self.test_client.get(
            url,
            headers=self.ph.get_authentication_headers())
        res_data = json.loads(res.get_data(as_text=True))
        self.assertEqual(res.status_code, status.HTTP_200_OK,
                         res.get_data(as_text=True))
        self.assertEqual(res_data['count'], 4)
    def test_update_feature(self):
        """
        Ensure we can update the name for an existing feature
        """
        # create our user so we can authenticate and create the feature
        res = self.ph.create_user(self.test_user_name, self.test_user_password)
        self.assertEqual(res.status_code, status.HTTP_201_CREATED,
                         res.get_data(as_text=True))
        # create a new feature, assert we receive a 201 http code and and assert there's only one Feature in the db
        title = 'New Feature Title'
        description = 'Description ' * 10
        target_date = date(2018, 6, 15)
        priority = 1
        client = 'Client 1'
        area = 'Billing'
        post_res = self.ph.create_feature(
            title, description, target_date, priority, client, area)
        self.assertEqual(
            post_res.status_code, status.HTTP_201_CREATED, post_res.get_data(as_text=True))
        self.assertEqual(Feature.query.count(), 1)
        post_res_data = json.loads(post_res.get_data(as_text=True))
        # Create a new area and a new client, so we test we can update those too
        area = "New Area"
        res = self.ph.create_area(area)
        self.assertEqual(res.status_code, status.HTTP_201_CREATED, res.get_data(as_text=True))
        client = "New Client"
        res = self.ph.create_client(client)
        self.assertEqual(res.status_code, status.HTTP_201_CREATED, res.get_data(as_text=True))
        # Create the patch request with the updated values
        feature_url = post_res_data['url']
        title = 'Updated Title'
        description = 'Updated Description ' * 10
        target_date = date(2018, 5, 19)
        priority = 15
        data = {'title': title, 'description': description, 'target_date': target_date.isoformat(),
                'client_priority': priority, 'client': client, 'area': area}
        patch_response = self.test_client.patch(
            feature_url,
            headers=self.ph.get_authentication_headers(),
            data=json.dumps(data))
        self.assertEqual(patch_response.status_code,
                         status.HTTP_200_OK, patch_response.get_data(as_text=True))
        # retrieve the updated feature and validate the name is the same as the updated value
        res = self.test_client.get(
            feature_url,
            headers=self.ph.get_authentication_headers())
        res_data = json.loads(res.get_data(as_text=True))
        self.assertEqual(res.status_code, status.HTTP_200_OK, res.get_data(as_text=True))
        self.assertEqual(res_data['title'], title)
        self.assertEqual(res_data['description'], description)
        self.assertEqual(res_data['target_date'], target_date.isoformat())
        self.assertEqual(res_data['client_priority'], priority)
        self.assertEqual(res_data['area']['name'], area)
        self.assertEqual(res_data['client']['name'], client)
    def test_features_priority_adjustment_when_adding_a_new_feature(self):
        """
        Ensure that when creating a new feature that has the same priority as another one it should adjust the priorities
        """
        # create our user so we can authenticate
        res = self.ph.create_user(self.test_user_name, self.test_user_password)
        self.assertEqual(res.status_code, status.HTTP_201_CREATED,
                         res.get_data(as_text=True))
        # create 4 features and assert the response
        for i in range(10):
            title = 'Title {}'.format(i+1)
            description = 'Description {}'.format(i+1)
            target_date = date(2018, 6, 1)
            priority = i+1
            client = "Client"
            area = "Billing"
            post_res = self.ph.create_feature(title, description, target_date, priority, client, area)
            self.assertEqual(post_res.status_code, status.HTTP_201_CREATED, post_res.get_data(as_text=True))
        # assert we only have this 10 features (with priorities from 1 to 10)
        self.assertEqual(Feature.query.count(), 10)
        # create a new one with priority 5, so the service must update all the priorities that are higher than 5
        title = 'New Feature'
        description = 'Description'
        target_date = date(2018, 6, i)  # NOTE(review): ``i`` is the leftover loop index (9); confirm intended
        priority = 5
        client = "Client"
        area = "Billing"
        post_res = self.ph.create_feature(title, description, target_date, priority, client, area)
        self.assertEqual(post_res.status_code, status.HTTP_201_CREATED, post_res.get_data(as_text=True))
        # Query all the priorities and verify they are updated correctly
        url = url_for('api.featurelistresource', _external=True, page=1, size=11)
        res = self.test_client.get(
            url,
            headers=self.ph.get_authentication_headers())
        res_data = json.loads(res.get_data(as_text=True))
        self.assertEqual(res.status_code, status.HTTP_200_OK,
                         res.get_data(as_text=True))
        self.assertEqual(res_data['count'], 11)
        # because it's a new db for this test, the id should be the same as the priority before we updated them
        features = res_data['results']
        for i in range(11):
            id = features[i]['id']
            priority = features[i]['client_priority']
            if id <= 4:
                self.assertEqual(priority, id)
            elif id == 11:
                self.assertEqual(priority, 5)
            else:
                self.assertEqual(priority, id + 1)
    def test_features_priority_adjustment_when_updating_an_existing_feature(self):
        """
        Ensure that when updating a feature that has the same priority as another one it should adjust the priorities
        """
        # create our user so we can authenticate
        res = self.ph.create_user(self.test_user_name, self.test_user_password)
        self.assertEqual(res.status_code, status.HTTP_201_CREATED,
                         res.get_data(as_text=True))
        # create the first feature, that we will update later
        title = 'Title 1'
        description = 'Description 1'
        target_date = date(2018, 6, 1)
        priority = 1
        client = "Client"
        area = "Billing"
        post_res = self.ph.create_feature(title, description, target_date, priority, client, area)
        res_data = json.loads(post_res.get_data(as_text=True))
        self.assertEqual(post_res.status_code, status.HTTP_201_CREATED, post_res.get_data(as_text=True))
        feature_url = res_data['url']
        # create other 9 features and assert the response
        for i in range(1,10):
            title = 'Title {}'.format(i+1)
            description = 'Description {}'.format(i+1)
            target_date = date(2018, 6, 1)
            priority = i+1
            client = "Client"
            area = "Billing"
            post_res = self.ph.create_feature(title, description, target_date, priority, client, area)
            res_data = json.loads(post_res.get_data(as_text=True))
            self.assertEqual(post_res.status_code, status.HTTP_201_CREATED, post_res.get_data(as_text=True))
        # assert we only have this 10 features (with priorities from 1 to 10)
        self.assertEqual(Feature.query.count(), 10)
        # update a feature with priority 1 to priority 2, so the service must update all the priorities that are higher or equal than 2
        priority = 2
        data = {'client_priority': priority}
        patch_response = self.test_client.patch(
            feature_url,
            headers=self.ph.get_authentication_headers(),
            data=json.dumps(data))
        self.assertEqual(patch_response.status_code,
                         status.HTTP_200_OK, patch_response.get_data(as_text=True))
        # Query all the priorities and verify they are updated correctly
        url = url_for('api.featurelistresource', _external=True, page=1, size=10)
        res = self.test_client.get(
            url,
            headers=self.ph.get_authentication_headers())
        res_data = json.loads(res.get_data(as_text=True))
        self.assertEqual(res.status_code, status.HTTP_200_OK,
                         res.get_data(as_text=True))
        self.assertEqual(res_data['count'], 10)
        # because it's a new db for this test, the id should be the same as the priority before we updated them
        features = res_data['results']
        for i in range(10):
            id = features[i]['id']
            priority = features[i]['client_priority']
            self.assertEqual(priority, id+1)
    def test_retrieve_features_list_by_area(self):
        """
        Ensure we can retrieve the features list for an area
        """
        # create our user so we can authenticate
        res = self.ph.create_user(self.test_user_name, self.test_user_password)
        self.assertEqual(res.status_code, status.HTTP_201_CREATED,
                         res.get_data(as_text=True))
        # create 4 features and assert the response
        for i in range(1, 5):
            title = 'New Feature Title {}'.format(i)
            description = 'Description {}'.format(i)
            target_date = date(2018, 6, i)
            priority = i
            client = "Client"
            area = "Billing"
            post_res = self.ph.create_feature(
                title, description, target_date, priority, client, area)
            self.assertEqual(
                post_res.status_code, status.HTTP_201_CREATED, post_res.get_data(as_text=True))
        # create another 4 features but for another area
        for i in range(1, 5):
            title = 'New Feature Title {}'.format(i)
            description = 'Description {}'.format(i)
            target_date = date(2018, 6, i)
            priority = i
            client = "Client"
            area = "Claims"
            post_res = self.ph.create_feature(
                title, description, target_date, priority, client, area)
            self.assertEqual(
                post_res.status_code, status.HTTP_201_CREATED, post_res.get_data(as_text=True))
        # assert we only have this 8
        self.assertEqual(Feature.query.count(), 8)
        # retrieve the complete list of features for the first client, it should return only the 4 we created
        url = url_for('api.featurelistbyarearesource', id = 1, _external=True)
        res = self.test_client.get(
            url,
            headers=self.ph.get_authentication_headers())
        res_data = json.loads(res.get_data(as_text=True))
        self.assertEqual(res.status_code, status.HTTP_200_OK,
                         res.get_data(as_text=True))
        self.assertEqual(res_data['count'], 4)
    def test_retrieve_features_list_by_client(self):
        """
        Ensure we can retrieve the features list for a client
        """
        # create our user so we can authenticate
        res = self.ph.create_user(self.test_user_name, self.test_user_password)
        self.assertEqual(res.status_code, status.HTTP_201_CREATED,
                         res.get_data(as_text=True))
        # create 4 features and assert the response
        for i in range(1, 5):
            title = 'New Feature Title {}'.format(i)
            description = 'Description {}'.format(i)
            target_date = date(2018, 6, i)
            priority = i
            client = "Client"
            area = "Billing"
            post_res = self.ph.create_feature(
                title, description, target_date, priority, client, area)
            self.assertEqual(
                post_res.status_code, status.HTTP_201_CREATED, post_res.get_data(as_text=True))
        # create another 4 features but for another area
        for i in range(1, 5):
            title = 'New Feature Title {}'.format(i)
            description = 'Description {}'.format(i)
            target_date = date(2018, 6, i)
            priority = i
            client = "Client 2"
            area = "Billing"
            post_res = self.ph.create_feature(
                title, description, target_date, priority, client, area)
            self.assertEqual(
                post_res.status_code, status.HTTP_201_CREATED, post_res.get_data(as_text=True))
        # assert we only have this 8
        self.assertEqual(Feature.query.count(), 8)
        # retrieve the complete list of features for the first client, it should return only the 4 we created
        url = url_for('api.featurelistbyclientresource', id = 1, _external=True)
        res = self.test_client.get(
            url,
            headers=self.ph.get_authentication_headers())
        res_data = json.loads(res.get_data(as_text=True))
        self.assertEqual(res.status_code, status.HTTP_200_OK,
                         res.get_data(as_text=True))
        self.assertEqual(res_data['count'], 4)
| [
"[email protected]"
] | |
81ead41af15f2e458481b49604b4fc00b30f8ecc | 9cbd22ce203ab7f40d6e02a7ee2b565461369b45 | /bagbankde/items.py | 768a82202c55a20a6a785aed852583aebad99500 | [] | no_license | hristo-grudev/bagbankde | a506ed6af28db7ad4c609d7fbd922d5a699b64d6 | 1afcb0454b9e498c4b4eccb233b7d2aa15823513 | refs/heads/main | 2023-03-26T14:09:08.641400 | 2021-03-18T14:16:31 | 2021-03-18T14:16:31 | 349,100,217 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 136 | py | import scrapy
class BagbankdeItem(scrapy.Item):
    """Scrapy item describing one record scraped by the bagbankde spider."""
    title = scrapy.Field()  # record title
    description = scrapy.Field()  # body / summary text
    date = scrapy.Field()  # date string -- format set by the spider; verify there
| [
"[email protected]"
] | |
7d42995cc032265dc1da6c26ba81455cc32bcebd | c60c199410289c1d7ec4aea00833b461e1f08f88 | /.history/older-than/older/source-example/day2/user-list.py | a923e529b041db39bfa93f7bc43cb926236f86e4 | [] | no_license | ver007/pythonjumpstart | 66fb111e6af197fad3e853b2c2d712a1b57a7d59 | 5b1f52479abd07456e2da494149e491d398f3b7d | refs/heads/master | 2021-01-21T01:34:35.501870 | 2015-05-13T14:10:13 | 2015-05-13T14:10:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 189 | py | #!/usr/bin/env python
users = [ line.split(':')[0] for line in open('/etc/passwd') if '#' not in line and '!' in line ]
users.sort()
for (i, n) in enumerate(users):
print i, ":", n
| [
"[email protected]"
] | |
0933da67fd790e5811ce8b580f16a0ab1a3f6a75 | 32bbbd6dbd100bbb9a2282f69ac3b7b34516347f | /Study/keras/keras44_cifar100_2_cnn.py | 88e3f8742ac013b4f6a6c64966e550971666ddae | [] | no_license | kimjh1753/AIA_Academy_Study | 2162d4d4f1a6b8ca1870f86d540df45a8742f359 | 6022718ae7f9e5170a19c4786d096c8042894ead | refs/heads/master | 2023-05-07T12:29:12.920693 | 2021-06-05T01:09:33 | 2021-06-05T01:09:33 | 324,136,796 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,971 | py | # 1. 데이터
# CIFAR-100 CNN training script (section comments translated from Korean).
from tensorflow.keras.datasets import cifar100
(x_train, y_train), (x_test, y_test) = cifar100.load_data()
print(x_train.shape, y_train.shape) # (50000, 32, 32, 3) (50000, 1)
print(x_test.shape, y_test.shape) # (10000, 32, 32, 3) (10000, 1)
# Divide by 255. to normalize pixel values; the reshape keeps the shape as-is.
x_train = x_train.reshape(x_train.shape[0], x_train.shape[1], x_train.shape[2], x_train.shape[3])/255.
x_test = x_test.reshape(x_test.shape[0], x_test.shape[1], x_test.shape[2], x_test.shape[3])/255.
# OneHotEncoding
from tensorflow.keras.utils import to_categorical
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
print(y_train.shape, y_test.shape) # (50000, 100) (10000, 100)
# 2. Build the model
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout
model = Sequential()
model.add(Conv2D(filters=10, kernel_size=(2,2), padding='same',
                 strides=1, input_shape=(32, 32, 3)))
model.add(Dropout(0.2))
model.add(Conv2D(9, (2,2), padding='valid'))
model.add(Flatten())
model.add(Dense(1000, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(1000, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(1000, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(1000, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(1000, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(1000, activation='relu'))
model.add(Dense(100, activation='softmax'))
# 3. Compile and train
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['acc'])
from tensorflow.keras.callbacks import EarlyStopping
# NOTE(review): early stopping monitors the *training* 'loss', not
# 'val_loss' -- confirm this is intended.
es = EarlyStopping(monitor='loss', patience=30, mode='auto')
model.fit(x_train, y_train, epochs=2000, batch_size=2000, validation_split=0.2, verbose=1, callbacks=[es])
# 4. Evaluate and predict
loss, acc = model.evaluate(x_test, y_test, batch_size=1)
print("loss : ", loss)
print("acc : ", acc)  # fixed label: this line prints accuracy, not loss
# keras cifar100 cnn
# loss : 5.992544174194336
# loss : 0.23280000686645508 | [
"[email protected]"
] | |
6c043811b2da3f373efa06bc8156705996b15ee9 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/adverbs/_never.py | d9002c9003bf7b8c0007df237bda667fddc3bf4d | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 393 | py |
#class header
class _NEVER():
def __init__(self,):
self.name = "NEVER"
self.definitions = [u'not at any time or not on any occasion: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'adverbs'
def run(self, obj1, obj2):
self.jsondata[obj2] = {}
self.jsondata[obj2]['properties'] = self.name.lower()
return self.jsondata
| [
"[email protected]"
] | |
6a1e6c2874181f6c5859c830e394359834617163 | 747f759311d404af31c0f80029e88098193f6269 | /extra-addons/training_doc/__init__.py | 4cb47ad014a13cc816addb240e952f246358cbea | [] | no_license | sgeerish/sirr_production | 9b0d0f7804a928c0c582ddb4ccb7fcc084469a18 | 1081f3a5ff8864a31b2dcd89406fac076a908e78 | refs/heads/master | 2020-05-19T07:21:37.047958 | 2013-09-15T13:03:36 | 2013-09-15T13:03:36 | 9,648,444 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,097 | py | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2011 Zikzakmedia S.L. (http://zikzakmedia.com) All Rights Reserved.
# Jesús Martín <[email protected]>
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import training_doc
import wizard
| [
"[email protected]"
] | |
239de711038d222c388248ee584d39770975bd23 | d53bc632503254ca0d5099fe457c02c07212a131 | /middleware1/testApp/middleware.py | acbf1c0c7c41b9e5d9f591e64615887737e2f158 | [] | no_license | srikar1993/django | ba8428f6e1162cc40f2d034126e7baf29eb62edc | 2199d5d94accc7bce5b3fac4a4b7b1444e39b35f | refs/heads/master | 2023-07-14T21:10:52.654992 | 2021-08-26T06:37:04 | 2021-08-26T06:37:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,950 | py | from django.http import HttpResponse
class ExecutionFlowMiddleware(object):
    """Demonstrates middleware ordering by printing before and after the
    downstream handler runs."""
    def __init__(self, get_response):
        self.get_response = get_response
    def __call__(self, request):
        print('This line is printed at pre-processing of request...')
        result = self.get_response(request)
        print('This line is printed at post-processing of request...')
        return result
class AppMaintainanceMiddleware(object):
    """Short-circuits every request with a static maintenance page; the
    downstream handler is never invoked."""
    def __init__(self, get_response):
        self.get_response = get_response
    def __call__(self, request):
        body = '<h1>Application Under Maintainance...</h1><h1>Please Try Again After Some Time</h1>'
        return HttpResponse(body)
class ErrorMessageMiddleware(object):
    """Passes requests through untouched, but turns unhandled view
    exceptions into an HTML error page via process_exception."""
    def __init__(self, get_response):
        self.get_response = get_response
    def __call__(self, request):
        return self.get_response(request)
    def process_exception(self, request, exception):
        intro = '''
        <h1>Please check the input you have provided...</h1><hr>
        '''
        details = f'''
        <h1>Raised Exception: {exception.__class__.__name__}</h1>
        <h1>Exception Message: {exception}</h1>
        '''
        return HttpResponse(intro + details)
class FirstMiddleware(object):
    """First middleware in the demo chain; prints around the handler call."""
    def __init__(self, get_response):
        self.get_response = get_response
    def __call__(self, request):
        print('This line is printed by First Middleware at pre-processing of request...')
        result = self.get_response(request)
        print('This line is printed by First Middleware at post-processing of request...')
        return result
class SecondMiddleware(object):
    """Second middleware in the demo chain; prints around the handler call."""
    def __init__(self, get_response):
        self.get_response = get_response
    def __call__(self, request):
        print('This line is printed by Second Middleware at pre-processing of request...')
        result = self.get_response(request)
        print('This line is printed by Second Middleware at post-processing of request...')
        return result
class ThirdMiddleware(object):
    """Third middleware in the demo chain; prints around the handler call."""
    def __init__(self, get_response):
        self.get_response = get_response
    def __call__(self, request):
        print('This line is printed by Third Middleware at pre-processing of request...')
        result = self.get_response(request)
        print('This line is printed by Third Middleware at post-processing of request...')
        return result
| [
"[email protected]"
] | |
6d13ea298a8c4814de41ef50e5ca2ebf16d19711 | c9b5a2cd00764ee4a0b889b5b602eb28fd08e989 | /python/238-Product of Array Except Self.py | 600d5102cd52a0453255a55f78ed445ca39932d5 | [] | no_license | cwza/leetcode | 39799a6730185fa06913e3beebebd3e7b2e5d31a | 72136e3487d239f5b37e2d6393e034262a6bf599 | refs/heads/master | 2023-04-05T16:19:08.243139 | 2021-04-22T04:46:45 | 2021-04-22T04:46:45 | 344,026,209 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,258 | py | from typing import List
class Solution:
    def productExceptSelf(self, nums: List[int]) -> List[int]:
        """Return out where out[i] is the product of every element of nums
        except nums[i], without using division.

        Suffix products are written into the output list first; a running
        prefix product is then folded in on a second pass.
        Time: O(n), Space: O(1) excluding the output list.
        """
        n = len(nums)
        out = [1] * n
        # out[i] <- product of nums[i+1:]
        for i in range(n - 2, -1, -1):
            out[i] = out[i + 1] * nums[i + 1]
        # Fold in the prefix product of nums[:i] on the way forward.
        prefix = 1
        for i, value in enumerate(nums):
            out[i] *= prefix
            prefix *= value
        return out
nums = [2,3,4,5]
result = Solution().productExceptSelf(nums)
assert result == [60,40,30,24]
nums = [2,3,0,5]
result = Solution().productExceptSelf(nums)
assert result == [0,0,30,0] | [
"[email protected]"
] | |
4c6c5d18a00823a83ef35c263e076351815ec55a | 98591a80b7881385dc15a7aee3298aed68efbc32 | /MODEL1302010025/model.py | 7776531980fb768cf4985182c7a6bdc908a3c3e7 | [
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] | permissive | biomodels/MODEL1302010025 | 9f49612839a3c29dd8034bf17a58a6caa3e1a8eb | 852113c7356661180c266a701e56dc8bc789a898 | refs/heads/master | 2020-12-24T14:44:47.764710 | 2014-10-16T05:57:03 | 2014-10-16T05:57:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 427 | py | import os
# The SBML model XML ships alongside this module; read it once at import
# time so sbmlString is available to the optional libsbml parse below.
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'MODEL1302010025.xml')
with open(sbmlFilePath,'r') as f:
    sbmlString = f.read()
def module_exists(module_name):
    """Return True when *module_name* can be imported, False otherwise."""
    try:
        __import__(module_name)
    except ImportError:
        return False
    return True
if module_exists('libsbml'):
import libsbml
sbml = libsbml.readSBMLFromString(sbmlString) | [
"[email protected]"
] | |
9f281fc9d686425e97b54cdc34eb570c1fe19b42 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02709/s208022099.py | 7c3413420f8ed9fc0b8a40a4b007da745e363f1f | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 681 | py | from sys import stdin
def main():
    """Competitive-programming solution (AtCoder p02709), reads stdin.

    Sorts the values in descending order and runs a DP that decides, for
    each value in turn, whether to place it at the next free slot from the
    left end or from the right end, maximising value * distance moved.
    """
    n = int(stdin.readline())
    a = list(map(int, stdin.readline().split()))
    # Pair each value with its original index, then sort largest-first so
    # the DP always places the most valuable remaining item next.
    a2 = [[a[i], i] for i in range(n)]
    a2.sort(reverse=True)
    # dp[i][j]: best total after placing the i+j largest items, i of them
    # packed at the left end and j at the right end.
    dp = [[0 for _ in range(n + 1)] for _ in range(n + 1)]
    ans = 0
    for i in range(n + 1):
        for j in range(n + 1 - i):
            s1 = s2 = 0
            if i > 0:
                # Place item (i+j-1) at left slot i-1: gain is value times
                # (original index - new position).
                s1 = dp[i - 1][j] + a2[i + j - 1][0] * (a2[i + j - 1][1] - (i - 1))
            if j > 0:
                # Or place it at right slot n-j instead.
                s2 = dp[i][j - 1] + a2[i + j - 1][0] * ((n - j) - a2[i + j - 1][1])
            dp[i][j] = max(s1, s2)
        # Only states that place all n items (i + j == n) are answers.
        ans = max(ans, dp[i][n - i])
    print(ans)
if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
a23647b52f355989359adaa6265a6bedbe23c029 | 64bf39b96a014b5d3f69b3311430185c64a7ff0e | /intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/fortinet/fortios/plugins/modules/fortios_system_replacemsg_image.py | f4603def9e807083a0568dc573aa96c96a7409b3 | [
"MIT"
] | permissive | SimonFangCisco/dne-dna-code | 7072eba7da0389e37507b7a2aa5f7d0c0735a220 | 2ea7d4f00212f502bc684ac257371ada73da1ca9 | refs/heads/master | 2023-03-10T23:10:31.392558 | 2021-02-25T15:04:36 | 2021-02-25T15:04:36 | 342,274,373 | 0 | 0 | MIT | 2021-02-25T14:39:22 | 2021-02-25T14:39:22 | null | UTF-8 | Python | false | false | 9,507 | py | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019-2020 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_replacemsg_image
short_description: Configure replacement message images in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify system feature and replacemsg_image category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.0
version_added: "2.9"
author:
- Link Zheng (@chillancezen)
- Jie Xue (@JieX19)
- Hongbin Lu (@fgtdev-hblu)
- Frank Shen (@frankshen01)
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Legacy fortiosapi has been deprecated, httpapi is the preferred way to run playbooks
requirements:
- ansible>=2.9.0
options:
access_token:
description:
- Token-based authentication.
Generated from GUI of Fortigate.
type: str
required: false
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
system_replacemsg_image:
description:
- Configure replacement message images.
default: null
type: dict
suboptions:
image_base64:
description:
- Image data.
type: str
image_type:
description:
- Image type.
type: str
choices:
- gif
- jpg
- tiff
- png
name:
description:
- Image name.
required: true
type: str
'''
EXAMPLES = '''
- hosts: fortigates
collections:
- fortinet.fortios
connection: httpapi
vars:
vdom: "root"
ansible_httpapi_use_ssl: yes
ansible_httpapi_validate_certs: no
ansible_httpapi_port: 443
tasks:
- name: Configure replacement message images.
fortios_system_replacemsg_image:
vdom: "{{ vdom }}"
state: "present"
access_token: "<your_own_value>"
system_replacemsg_image:
image_base64: "<your_own_value>"
image_type: "gif"
name: "default_name_5"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import FortiOSHandler
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import check_legacy_fortiosapi
from ansible_collections.fortinet.fortios.plugins.module_utils.fortimanager.common import FAIL_SOCKET_MSG
def filter_system_replacemsg_image_data(json):
    """Keep only the module's known option keys whose values are not None."""
    option_list = ['image_base64', 'image_type', 'name']
    return dict(
        (attribute, json[attribute])
        for attribute in option_list
        if attribute in json and json[attribute] is not None
    )
def underscore_to_hyphen(data):
    """Recursively rewrite dict keys so '_' becomes '-' (FortiOS naming).

    Lists are rewritten in place; dicts are rebuilt; scalars pass through.
    """
    if isinstance(data, list):
        for index in range(len(data)):
            data[index] = underscore_to_hyphen(data[index])
    elif isinstance(data, dict):
        data = dict(
            (key.replace('_', '-'), underscore_to_hyphen(value))
            for key, value in data.items()
        )
    return data
def system_replacemsg_image(data, fos):
    """Create/update ("present") or delete ("absent") a replacemsg-image
    entry through the FortiOS handler; any other state aborts the run."""
    vdom = data['vdom']
    state = data['state']
    system_replacemsg_image_data = data['system_replacemsg_image']
    # Drop unknown/None options, then convert key underscores to hyphens to
    # match the FortiOS API field names.
    filtered_data = underscore_to_hyphen(filter_system_replacemsg_image_data(system_replacemsg_image_data))
    if state == "present":
        return fos.set('system',
                       'replacemsg-image',
                       data=filtered_data,
                       vdom=vdom)
    elif state == "absent":
        # 'name' is the table's mkey (primary key) used to address the row.
        return fos.delete('system',
                          'replacemsg-image',
                          mkey=filtered_data['name'],
                          vdom=vdom)
    else:
        fos._module.fail_json(msg='state must be present or absent!')
def is_successful_status(status):
    """A call succeeded outright, or was a DELETE that found nothing (404)."""
    if status['status'] == "success":
        return True
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_system(data, fos):
    """Dispatch to the resource handler and normalise its response.

    Returns a tuple (is_error, changed, raw_response).
    """
    if data['system_replacemsg_image']:
        resp = system_replacemsg_image(data, fos)
    else:
        # No task body was supplied for this resource; abort the module run.
        fos._module.fail_json(msg='missing task body: %s' % ('system_replacemsg_image'))
    # "changed" is True on success unless the device reports the revision
    # did not change.
    return not is_successful_status(resp), \
        resp['status'] == "success" and \
        (resp['revision_changed'] if 'revision_changed' in resp else True), \
        resp
def main():
    """Module entry point: declare the argument spec, connect over the
    httpapi socket and execute the requested state change."""
    # Primary-key field name of the FortiOS table, used by FortiOSHandler.
    mkeyname = 'name'
    fields = {
        "access_token": {"required": False, "type": "str", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "state": {"required": True, "type": "str",
                  "choices": ["present", "absent"]},
        "system_replacemsg_image": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "image_base64": {"required": False, "type": "str"},
                "image_type": {"required": False, "type": "str",
                               "choices": ["gif",
                                           "jpg",
                                           "tiff",
                                           "png"]},
                "name": {"required": True, "type": "str"}
            }
        }
    }
    check_legacy_fortiosapi()
    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)
    versions_check_result = None
    # A socket path is only present when running under the httpapi
    # connection plugin; anything else is unsupported.
    if module._socket_path:
        connection = Connection(module._socket_path)
        if 'access_token' in module.params:
            connection.set_option('access_token', module.params['access_token'])
        fos = FortiOSHandler(connection, module, mkeyname)
        is_error, has_changed, result = fortios_system(module.params, fos)
        versions_check_result = connection.get_system_version()
    else:
        module.fail_json(**FAIL_SOCKET_MSG)
    if versions_check_result and versions_check_result['matched'] is False:
        module.warn("Ansible has detected version mismatch between FortOS system and galaxy, see more details by specifying option -vvv")
    if not is_error:
        if versions_check_result and versions_check_result['matched'] is False:
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result)
        else:
            module.exit_json(changed=has_changed, meta=result)
    else:
        if versions_check_result and versions_check_result['matched'] is False:
            module.fail_json(msg="Error in repo", version_check_warning=versions_check_result, meta=result)
        else:
            module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
40125cfd752c5de08af72a21e324f89b505db21d | 2603f28e3dc17ae2409554ee6e1cbd315a28b732 | /ABC38/prob_c.py | f33438f7f66d9238ccdeb20f211ec805df4b4225 | [] | no_license | steinstadt/AtCoder | 69f172280e89f4249e673cae9beab9428e2a4369 | cd6c7f577fcf0cb4c57ff184afdc163f7501acf5 | refs/heads/master | 2020-12-23T12:03:29.124134 | 2020-11-22T10:47:40 | 2020-11-22T10:47:40 | 237,144,420 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 346 | py | # Problem C - 単調増加
# input
N = int(input())
a_list = list(map(int, input().split()))
# initialization
ans = 0
# shakutori count
right = 0
for left in range(N):
while (right<N and a_list[right-1]<a_list[right]) or left==right:
right += 1
ans += right - left
if left==right:
right += 1
# output
print(ans)
| [
"[email protected]"
] | |
c73896b41397d024ffcb55baeaa9f677221d9361 | 6e8d58340f2be5f00d55e2629052c0bbc9dcf390 | /eggs/Cheetah-2.2.2-py2.6-linux-x86_64-ucs4.egg/Cheetah/Tools/MondoReport.py | d0fada29f11cec17ce7def984ffabddd6bbf15ad | [
"CC-BY-2.5",
"MIT"
] | permissive | JCVI-Cloud/galaxy-tools-prok | e57389750d33ac766e1658838cdb0aaf9a59c106 | 3c44ecaf4b2e1f2d7269eabef19cbd2e88b3a99c | refs/heads/master | 2021-05-02T06:23:05.414371 | 2014-03-21T18:12:43 | 2014-03-21T18:12:43 | 6,092,693 | 0 | 2 | NOASSERTION | 2020-07-25T20:38:17 | 2012-10-05T15:57:38 | Python | UTF-8 | Python | false | false | 12,935 | py | """
@@TR: This code is pretty much unsupported.
MondoReport.py -- Batching module for Python and Cheetah.
Version 2001-Nov-18. Doesn't do much practical yet, but the companion
testMondoReport.py passes all its tests.
-Mike Orr (Iron)
TODO: BatchRecord.prev/next/prev_batches/next_batches/query, prev.query,
next.query.
How about Report: .page(), .all(), .summary()? Or PageBreaker.
"""
import operator, types
try:
from Cheetah.NameMapper import valueForKey as lookup_func
except ImportError:
def lookup_func(obj, name):
if hasattr(obj, name):
return getattr(obj, name)
else:
return obj[name] # Raises KeyError.
########## CONSTANTS ##############################
True, False = (1==1), (1==0)
numericTypes = types.IntType, types.LongType, types.FloatType
########## PUBLIC GENERIC FUNCTIONS ##############################
class NegativeError(ValueError):
    """Raised by isNonNegative when a numeric value is negative."""
    pass
def isNumeric(v):
    """True when type(v) is one of the numericTypes (int/long/float)."""
    return type(v) in numericTypes
def isNonNegative(v):
    """True when v is numeric; raises NegativeError when it is negative.

    Bug fix: the computed result was previously dropped (the function
    always fell off the end and returned None); it is now returned, as the
    name and the `ret =` assignment clearly intend.
    """
    ret = isNumeric(v)
    if ret and v < 0:
        raise NegativeError(v)
    return ret
def isNotNone(v):
    """True for every value except None."""
    return v is not None
def Roman(n):
    """Return the Roman-numeral string for the positive integer *n*.

    *n* is first truncated with int() (so numeric non-integers are
    accepted); values below 1 have no Roman representation and raise
    ValueError.

    Bug fix: the error message concatenated a str with the int *n*, so the
    guard raised TypeError instead of the intended ValueError; the value is
    now converted with str() first.
    """
    n = int(n) # Raises TypeError.
    if n < 1:
        raise ValueError("roman numeral for zero or negative undefined: " + str(n))
    # Greedy subtractive-notation table: consume the largest value first.
    # This yields the same strings as the original greedy-plus-replace
    # implementation (e.g. DCCCC -> CM, IIII -> IV).
    table = (
        (1000, 'M'), (900, 'CM'), (500, 'D'), (400, 'CD'),
        (100, 'C'), (90, 'XC'), (50, 'L'), (40, 'XL'),
        (10, 'X'), (9, 'IX'), (5, 'V'), (4, 'IV'), (1, 'I'),
    )
    roman = ''
    for value, symbol in table:
        count, n = divmod(n, value)
        roman += symbol * count
    return roman
def sum(lis):
    """Return the sum of *lis*; 0 for an empty list.

    Shadows the builtin of the same name (kept for API compatibility).
    Implemented with a plain accumulation loop so it no longer depends on
    reduce(), which is not a builtin on Python 3.
    """
    total = 0
    for value in lis:
        total = total + value
    return total
def mean(lis):
    """Arithmetic mean of *lis* as a float; 0.00 for an empty list."""
    count = len(lis)
    if not count:
        return 0.00 # Avoid ZeroDivisionError (not raised for floats anyway)
    return float(sum(lis)) / count
def median(lis):
    """Median element of *lis* (the upper middle for even lengths).

    The input list is left unmodified.
    """
    ordered = sorted(lis)
    return ordered[len(ordered) // 2]
def variance(lis):
    # Not implemented yet; raises so callers fail loudly rather than
    # silently receiving a wrong statistic.
    raise NotImplementedError()
def variance_n(lis):
    raise NotImplementedError()
def standardDeviation(lis):
    raise NotImplementedError()
def standardDeviation_n(lis):
    raise NotImplementedError()
class IndexFormats:
    """Render a zero-based subscript in eight presentation forms.

    ("Fifty ways to leave your lover....")
    """
    def __init__(self, index, item=None):
        # Subclasses (e.g. RecordStats) rely on these exact attribute names.
        self._index = index
        self._number = index + 1
        self._item = item
    def index(self):
        """The zero-based index itself."""
        return self._index
    __call__ = index
    def number(self):
        """The one-based ordinal."""
        return self._number
    def even(self):
        return self._number % 2 == 0
    def odd(self):
        return self._number % 2 != 0
    def even_i(self):
        return self._index % 2 == 0
    def odd_i(self):
        return self._index % 2 != 0
    def letter(self):
        """Lowercase letter form ('a' for index 0)."""
        return self.Letter().lower()
    def Letter(self):
        """Uppercase letter form ('A' for index 0)."""
        return chr(ord('A') + self._index)
    def roman(self):
        """Lowercase roman numeral of the one-based number."""
        return self.Roman().lower()
    def Roman(self):
        """Uppercase roman numeral of the one-based number."""
        return Roman(self._number)
    def item(self):
        """The record associated with this index, if any."""
        return self._item
########## PRIVATE CLASSES ##############################
class ValuesGetterMixin:
    """Holds the original record list and extracts (optionally filtered)
    values from it for the statistics classes."""
    def __init__(self, origList):
        self._origList = origList
    def _getValues(self, field=None, criteria=None):
        # With a field name, pull that attribute/key from every record;
        # otherwise use the records themselves.  criteria, when given, is a
        # predicate applied with filter().
        values = self._origList
        if field:
            values = [lookup_func(record, field) for record in self._origList]
        if criteria:
            values = filter(criteria, values)
        return values
class RecordStats(IndexFormats, ValuesGetterMixin):
    """The statistics that depend on the current record.
    """
    def __init__(self, origList, index):
        record = origList[index] # Raises IndexError.
        IndexFormats.__init__(self, index, record)
        ValuesGetterMixin.__init__(self, origList)
    def length(self):
        # Total number of records in the underlying list.
        return len(self._origList)
    def first(self):
        return self._index == 0
    def last(self):
        return self._index >= len(self._origList) - 1
    def _firstOrLastValue(self, field, currentIndex, otherIndex):
        # True when the record at currentIndex differs from its neighbour
        # (comparing only *field* when given); a missing neighbour counts
        # as "differs".
        currentValue = self._origList[currentIndex] # Raises IndexError.
        try:
            otherValue = self._origList[otherIndex]
        except IndexError:
            return True
        if field:
            currentValue = lookup_func(currentValue, field)
            otherValue = lookup_func(otherValue, field)
        return currentValue != otherValue
    def firstValue(self, field=None):
        # NOTE(review): at index 0 the neighbour index is -1, which Python
        # resolves to the LAST record rather than raising IndexError, so
        # the first record is compared against the last one -- confirm.
        return self._firstOrLastValue(field, self._index, self._index - 1)
    def lastValue(self, field=None):
        return self._firstOrLastValue(field, self._index, self._index + 1)
    # firstPage and lastPage not implemented. Needed?
    def percentOfTotal(self, field=None, suffix='%', default='N/A', decimals=2):
        # This record's share of the column total.  *default* is returned
        # when the data is non-numeric or the total is zero.
        rec = self._origList[self._index]
        if field:
            val = lookup_func(rec, field)
        else:
            val = rec
        try:
            lis = self._getValues(field, isNumeric)
        except NegativeError:
            return default
        total = sum(lis)
        if total == 0.00: # Avoid ZeroDivisionError.
            return default
        val = float(val)
        try:
            percent = (val / total) * 100
        except ZeroDivisionError:
            return default
        if decimals == 0:
            percent = int(percent)
        else:
            percent = round(percent, decimals)
        if suffix:
            return str(percent) + suffix # String.
        else:
            return percent # Numeric.
    def __call__(self): # Overrides IndexFormats.__call__
        """This instance is not callable, so we override the super method.
        """
        raise NotImplementedError()
    def prev(self):
        # Page of records immediately before this one, or None at the start.
        if self._index == 0:
            return None
        else:
            length = self.length()
            start = self._index - length
            return PrevNextPage(self._origList, length, start)
    def next(self):
        # NOTE(review): this condition reduces to self._index == 0, which
        # looks unintended for a "no next page" test -- confirm.
        if self._index + self.length() == self.length():
            return None
        else:
            length = self.length()
            start = self._index + length
            return PrevNextPage(self._origList, length, start)
    def prevPages(self):
        raise NotImplementedError()
    def nextPages(self):
        raise NotImplementedError()
    prev_batches = prevPages
    next_batches = nextPages
    def summary(self):
        raise NotImplementedError()
    def _prevNextHelper(self, start,end,size,orphan,sequence):
        """Copied from Zope's DT_InSV.py's "opt" function.
        """
        # Clamps a (start, end, size) page window to the sequence bounds;
        # the bare "except" clauses probe for IndexError via indexing.
        if size < 1:
            if start > 0 and end > 0 and end >= start:
                size=end+1-start
            else: size=7
        if start > 0:
            try: sequence[start-1]
            except: start=len(sequence)
            # if start > l: start=l
            if end > 0:
                if end < start: end=start
            else:
                end=start+size-1
                try: sequence[end+orphan-1]
                except: end=len(sequence)
                # if l - end < orphan: end=l
        elif end > 0:
            try: sequence[end-1]
            except: end=len(sequence)
            # if end > l: end=l
            start=end+1-size
            if start - 1 < orphan: start=1
        else:
            start=1
            end=start+size-1
            try: sequence[end+orphan-1]
            except: end=len(sequence)
            # if l - end < orphan: end=l
        return start,end,size
class Summary(ValuesGetterMixin):
    """The summary statistics, that don't depend on the current record.
    """
    def __init__(self, origList):
        ValuesGetterMixin.__init__(self, origList)
    def sum(self, field=None):
        """Sum of the numeric values (of *field*, if given)."""
        lis = self._getValues(field, isNumeric)
        return sum(lis)
    total = sum
    def count(self, field=None):
        """Number of values that are not None."""
        lis = self._getValues(field, isNotNone)
        return len(lis)
    def min(self, field=None):
        lis = self._getValues(field, isNotNone)
        return min(lis) # Python builtin function min.
    def max(self, field=None):
        lis = self._getValues(field, isNotNone)
        return max(lis) # Python builtin function max.
    def mean(self, field=None):
        """Always returns a floating point number.
        """
        lis = self._getValues(field, isNumeric)
        return mean(lis)
    average = mean
    def median(self, field=None):
        lis = self._getValues(field, isNumeric)
        return median(lis)
    def variance(self, field=None):
        # Bug fix: these four stubs called the undefined name
        # "raiseNotImplementedError" (a NameError at call time); the intent
        # is clearly to raise the builtin NotImplementedError, matching the
        # module-level variance()/standardDeviation() stubs above.
        raise NotImplementedError()
    def variance_n(self, field=None):
        raise NotImplementedError()
    def standardDeviation(self, field=None):
        raise NotImplementedError()
    def standardDeviation_n(self, field=None):
        raise NotImplementedError()
class PrevNextPage:
    # Lightweight descriptor of an adjacent page: IndexFormats wrappers for
    # its first and one-past-last records, plus the page length.
    def __init__(self, origList, size, start):
        end = start + size
        # NOTE(review): origList[end] indexes one past the page's last
        # record and raises IndexError for the final page -- confirm the
        # intended bounds.
        self.start = IndexFormats(start, origList[start])
        self.end = IndexFormats(end, origList[end])
        self.length = size
########## MAIN PUBLIC CLASS ##############################
class MondoReport:
    # Hook points so subclasses can substitute their own stats classes.
    _RecordStatsClass = RecordStats
    _SummaryClass = Summary
    def __init__(self, origlist):
        self._origList = origlist
    def page(self, size, start, overlap=0, orphan=0):
        """Returns list of ($r, $a, $b)
        """
        if overlap != 0:
            raise NotImplementedError("non-zero overlap")
        if orphan != 0:
            raise NotImplementedError("non-zero orphan")
        origList = self._origList
        origList_len = len(origList)
        start = max(0, start)
        end = min( start + size, len(self._origList) )
        mySlice = origList[start:end]
        ret = []
        # r: the record itself; a: stats relative to the whole list;
        # b: stats relative to this page's slice.
        # NOTE(review): iterates range(size) rather than len(mySlice); a
        # short final page makes mySlice[rel] raise IndexError -- confirm.
        for rel in range(size):
            abs_ = start + rel
            r = mySlice[rel]
            a = self._RecordStatsClass(origList, abs_)
            b = self._RecordStatsClass(mySlice, rel)
            tup = r, a, b
            ret.append(tup)
        return ret
    batch = page
    def all(self):
        # One page spanning the entire record list.
        origList_len = len(self._origList)
        return self.page(origList_len, 0, 0, 0)
    def summary(self):
        # Whole-list, record-independent statistics (sum/mean/median/...).
        return self._SummaryClass(self._origList)
"""
**********************************
Return a pageful of records from a sequence, with statistics.
in : origlist, list or tuple. The entire set of records. This is
usually a list of objects or a list of dictionaries.
page, int >= 0. Which page to display.
size, int >= 1. How many records per page.
widow, int >=0. Not implemented.
orphan, int >=0. Not implemented.
base, int >=0. Number of first page (usually 0 or 1).
out: list of (o, b) pairs. The records for the current page. 'o' is
the original element from 'origlist' unchanged. 'b' is a Batch
object containing meta-info about 'o'.
exc: IndexError if 'page' or 'size' is < 1. If 'origlist' is empty or
'page' is too high, it returns an empty list rather than raising
an error.
origlist_len = len(origlist)
start = (page + base) * size
end = min(start + size, origlist_len)
ret = []
# widow, orphan calculation: adjust 'start' and 'end' up and down,
# Set 'widow', 'orphan', 'first_nonwidow', 'first_nonorphan' attributes.
for i in range(start, end):
o = origlist[i]
b = Batch(origlist, size, i)
tup = o, b
ret.append(tup)
return ret
def prev(self):
# return a PrevNextPage or None
def next(self):
# return a PrevNextPage or None
def prev_batches(self):
# return a list of SimpleBatch for the previous batches
def next_batches(self):
# return a list of SimpleBatch for the next batches
########## PUBLIC MIXIN CLASS FOR CHEETAH TEMPLATES ##############
class MondoReportMixin:
def batch(self, origList, size=None, start=0, overlap=0, orphan=0):
bat = MondoReport(origList)
return bat.batch(size, start, overlap, orphan)
def batchstats(self, origList):
bat = MondoReport(origList)
return bat.stats()
"""
# vim: shiftwidth=4 tabstop=4 expandtab textwidth=79
| [
"[email protected]"
] | |
12d4e303ec37dc162f5cd4b655c882bf2ae8429b | 0b77f11bfb68d465e99fdfcea8bef63013409df8 | /reports/views.py | e7046593d76822fccfcdfe0b0bab740325b0bb42 | [] | no_license | dbsiavichay/furb | dea1de7d3085bd41a668a6581a4997ff50a58afe | 36dea81c23d614bceaf35b38a5861a2ca095ea98 | refs/heads/master | 2020-06-28T06:05:42.313533 | 2019-03-14T15:37:20 | 2019-03-14T15:37:20 | 74,506,200 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,486 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.shortcuts import render
from django.http import HttpResponse
from django.views.generic import ListView, TemplateView
from wildlife.models import Animal, Kind
from location.models import Parish
from django.conf import settings
from os.path import isfile, join
from io import BytesIO
from reportlab.lib import colors
from reportlab.lib.pagesizes import A4, landscape
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.platypus import SimpleDocTemplate, Paragraph, TableStyle, Table, Image
from reportlab.lib.units import cm, mm
from wildlife.views import get_letterhead_page, NumberedCanvas
class ParishListView(ListView):
model = Parish
template_name = 'reports/animal_by_parish.html'
queryset = Parish.objects.filter(canton_code='1401')
class StatsListView(ListView):
model = Parish
template_name = 'reports/animal_stats.html'
queryset = Parish.objects.filter(canton_code='1401')
def get_context_data(self, **kwargs):
from datetime import date
context = super(StatsListView, self).get_context_data(**kwargs)
years = range(2017, date.today().year + 1)
months = [
(1, 'ENERO'),
(2, 'FEBRERO'),
(3, 'MARZO'),
(4, 'ABRIL'),
(5, 'MAYO'),
(6, 'JUNIO'),
(7, 'JULIO'),
(8, 'AGOSTO'),
(9, 'SEPTIEMBRE'),
(10, 'OCTUBRE'),
(11, 'NOVIEMBRE'),
(12, 'DICIEMBRE'),
]
context.update({
'months': months,
'years':years,
})
return context
def get_by_parish(request, parish):
# Create the HttpResponse object with the appropriate PDF headers.
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'inline; filename=reporte.pdf'
sterilized = request.GET.get('sterilized', False)
pdf = get_animal_by_parish_report(parish, sterilized)
response.write(pdf)
return response
def get_animal_stats(request, month, year):
# Create the HttpResponse object with the appropriate PDF headers.
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'inline; filename=reporte.pdf'
pdf = get_chart_by_month(month, year)
response.write(pdf)
return response
def get_animal_by_parish_report(parish, sterilized):
animals = Animal.objects.filter(parish=parish).order_by('owner')
if sterilized: animals = animals.filter(want_sterilize=True)
buff = BytesIO()
doc = SimpleDocTemplate(buff,pagesize=A4,rightMargin=60, leftMargin=40, topMargin=75, bottomMargin=50,)
styles = getSampleStyleSheet()
path = join(settings.BASE_DIR, 'static/assets/report/checked.png')
report = [
Paragraph("DIRECCIÓN DE GESTION AMBIENTAL Y SERVICIOS PÚBLICOS", styles['Title']),
Paragraph("REPORTE DE FAUNA POR PARROQUIA", styles['Title']),
]
tstyle = TableStyle([
('LINEBELOW',(0,0),(-1,-1),0.1, colors.gray),
('TOPPADDING',(0,0),(-1,-1), 5),
('BOTTOMPADDING',(0,0),(-1,-1), 0),
('LEFTPADDING',(0,0),(-1,-1), 0),
('RIGHTPADDING',(0,0),(-1,-1), 0),
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
])
# tstyle = TableStyle([
# ('INNERGRID', (0, 0), (-1, -1), 0.25, colors.black),
# ('LEFTPADDING',(0,0),(-1,-1), 3),
# ('RIGHTPADDING',(0,0),(-1,-1), 3),
# ('BOTTOMPADDING',(0,0),(-1,-1), 0),
# ('BOX', (0, 0), (-1, -1), 0.5, colors.black),
# ('ALIGN',(0,0),(0,-1),'RIGHT'),
# ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
# ('BACKGROUND', (0, 0), (-1, 0), colors.gray)
# ])
headingStyle = styles['Heading5']
headingStyle.fontSize = 6
contentStyle = styles['BodyText']
contentStyle.fontSize = 5
columns_width = [0.5*cm, 1.4*cm, 2.5*cm,1.2*cm,1.8*cm,1.5*cm,4.5*cm,3*cm]
headings = ('N°', 'CÓDIGO', 'NOMBRE','ESPECIE', 'ESTERILIZAR?', 'UBICACIÓN', 'PROPIETARIO', 'CONTACTO')
headings = (Paragraph(h, headingStyle) for h in headings)
content = [(
Paragraph(str(index + 1), contentStyle),
Paragraph(animal.code, contentStyle),
Paragraph(animal.name.title(), contentStyle),
Paragraph(animal.breed.kind.name.title(), contentStyle),
Image(path, width=2.5*mm, height=2.5*mm) if animal.want_sterilize else Paragraph('', contentStyle),
Paragraph(animal.get_parish_name().title(), contentStyle),
Paragraph(animal.get_owner_name().title(), contentStyle),
Paragraph(animal.get_owner_contact().title(), contentStyle),
) for index, animal in enumerate(animals)] if len(animals) else [('Sin datos.',)]
table = Table([headings] + content, columns_width, style=tstyle, )
report.append(table)
doc.build(report,canvasmaker=NumberedCanvas,onFirstPage=get_letterhead_page,onLaterPages=get_letterhead_page)
return buff.getvalue()
def get_chart_by_month(month, year):
    """Build and return PDF bytes with one pie chart per animal kind for
    the given month/year, sliced by parish of canton 1401."""
    buff = BytesIO()
    months = [
        'ENERO','FEBRERO','MARZO','ABRIL',
        'MAYO','JUNIO','JULIO','AGOSTO',
        'SEPTIEMBRE','OCTUBRE','NOVIEMBRE','DICIEMBRE',
    ]
    doc = SimpleDocTemplate(buff,pagesize=A4,rightMargin=60, leftMargin=40, topMargin=75, bottomMargin=50,)
    styles = getSampleStyleSheet()
    report = [
        Paragraph("DIRECCIÓN DE GESTION AMBIENTAL Y SERVICIOS PÚBLICOS", styles['Title']),
        Paragraph('REPORTE ESTADISTICO %s %s' % (months[int(month)-1],year), styles['Title']),
    ]
    parishes = Parish.objects.filter(canton_code='1401')
    kinds = Kind.objects.all()
    for kind in kinds:
        # Animals of this kind registered in the requested month.
        _animals = Animal.objects.filter(
            breed__kind=kind,
            date_joined__year = int(year),
            date_joined__month = int(month)
        )
        data = []
        labels = []
        for parish in parishes:
            animals = _animals.filter(parish=parish.code)
            if len(animals) > 0:
                percent = (len(animals) * 100.00) / len(_animals)
                data.append(len(animals))
                labels.append('%s (%0.2f%%)' % (parish.name.encode('utf-8'), percent))
        # Skip kinds with no registrations this month.
        if len(data) > 0:
            report.append(Paragraph(kind.name, styles['Heading3']))
            chart = create_pie_chart(data, labels, True)
            report.append(chart)
    doc.build(report,canvasmaker=NumberedCanvas,onFirstPage=get_letterhead_page,onLaterPages=get_letterhead_page)
    return buff.getvalue()
# Fixed colour palette applied to pie-chart slices by create_pie_chart.
colores = [
    colors.HexColor('#7fffd4'),
    colors.HexColor('#0000ff'),
    colors.HexColor('#a52a2a'),
    colors.HexColor('#ff7f50'),
    colors.HexColor('#a9a9a9'),
    colors.HexColor('#008b8b'),
    colors.HexColor('#8b0000'),
    colors.HexColor('#ff00ff'),
    colors.HexColor('#00008b'),
    colors.HexColor('#008000'),
    colors.HexColor('#adff2f'),
    colors.HexColor('#00ff00'),
    colors.HexColor('#ff00ff'),
    colors.HexColor('#ffa500'),
    colors.HexColor('#ff0000'),
    colors.HexColor('#ee82ee'),
    colors.HexColor('#ffff00'),
]
def add_legend(draw_obj, chart, data):
    """Attach a legend to *draw_obj*, pairing each slice's colour with its
    label text (the part before '(') and its value."""
    from reportlab.graphics.charts.legends import Legend
    from reportlab.lib.validators import Auto
    legend = Legend()
    legend.alignment = 'right'
    legend.x = 90
    legend.y = 50
    legend.colorNamePairs = [(chart.slices[i].fillColor, (chart.labels[i].split('(')[0], '%s' % chart.data[i])) for i in range(0, len(data))]
    draw_obj.add(legend)
def create_pie_chart(data, labels, legend=False):
    """Return a reportlab Drawing containing a labelled pie chart.

    *data* and *labels* are parallel lists; when *legend* is true a legend
    built from the slice colours is added to the drawing as well.
    """
    from reportlab.graphics.charts.piecharts import Pie
    from reportlab.graphics.shapes import Drawing
    d = Drawing(250, 275)
    pie = Pie()
    # required by Auto
    pie._seriesCount = len(data)
    pie.x = 175
    pie.y = 100
    pie.width = 150
    pie.height = 150
    pie.data = data
    pie.labels = labels
    pie.simpleLabels = 0
    pie.sideLabels = True
    pie.slices.strokeWidth = 0.5
    # Apply the module-level "colores" palette to the slices.
    for i in range (0, len(colores)):
        pie.slices[i].fillColor = colores[i]
    if legend:
        add_legend(d, pie, data)
    d.add(pie)
    #d.save(formats=['pdf'], outDir='.', fnRoot='test-pie')
    return d
| [
"[email protected]"
] | |
53ef131a0b9babc5af8fa15c91c4fca6cc7de93c | 69c882c678103b182988fb60d3e898d569980f1c | /Day 4/day4prog4.py | 5f6224da0ba42f841f9b5541f0a3d0a63e87733b | [] | no_license | gittygupta/stcet-python | 44be9d91cdd6215879d9f04497214819228821be | e77456172746ee76b6e2a901ddb0c3dbe457f82a | refs/heads/master | 2022-03-05T11:37:08.720226 | 2019-12-01T00:56:03 | 2019-12-01T00:56:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | n=2
while n<1000:
flag=1
for i in range (2,n):
if n%i==0:
flag=0
if flag==1:
print(n)
n+=1
| [
"[email protected]"
] | |
f5b323edda211f6994625eb9f7b4a4f34eaa4abc | 349c9829279a9e99d18d1572d7aa9a31ec4e048c | /Topics/Time module/Easy convert/main.py | 69179e696eb022d47901aed90bce5e6f36e9317e | [] | no_license | Helen-Sk-2020/JtBr_Arithmetic_Exam_Application | 2b0eb871a89f3e4370f21e11c0c83b1885a8f4ae | a7e1b4a4c09df159c98da1a151db55848ba4d8a4 | refs/heads/master | 2023-08-18T22:42:11.809697 | 2021-10-14T10:07:36 | 2021-10-14T10:07:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 72 | py | import time
current_time = time.ctime(time.time())
print(current_time)
| [
"[email protected]"
] | |
3457d5a9fc1cb829b5810e28cb19b670b4a2c408 | 79f42fd0de70f0fea931af610faeca3205fd54d4 | /base_lib/ChartDirector/pythondemo_cgi/finance2.py | d21cccb185021c83f32e480286c061357e3302a6 | [
"IJG"
] | permissive | fanwen390922198/ceph_pressure_test | a900a6dc20473ae3ff1241188ed012d22de2eace | b6a5b6d324e935915090e791d9722d921f659b26 | refs/heads/main | 2021-08-27T16:26:57.500359 | 2021-06-02T05:18:39 | 2021-06-02T05:18:39 | 115,672,998 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,647 | py | #!/usr/bin/python
from FinanceChart import *
# Create a finance chart demo containing 100 days of data
noOfDays = 100
# To compute moving averages starting from the first day, we need to get extra data points before
# the first day
extraDays = 30
# In this exammple, we use a random number generator utility to simulate the data. We set up the
# random table to create 6 cols x (noOfDays + extraDays) rows, using 9 as the seed.
rantable = RanTable(9, 6, noOfDays + extraDays)
# Set the 1st col to be the timeStamp, starting from Sep 4, 2002, with each row representing one
# day, and counting week days only (jump over Sat and Sun)
rantable.setDateCol(0, chartTime(2002, 9, 4), 86400, 1)
# Set the 2nd, 3rd, 4th and 5th columns to be high, low, open and close data. The open value starts
# from 100, and the daily change is random from -5 to 5.
rantable.setHLOCCols(1, 100, -5, 5)
# Set the 6th column as the vol data from 5 to 25 million
rantable.setCol(5, 50000000, 250000000)
# Now we read the data from the table into arrays
timeStamps = rantable.getCol(0)
highData = rantable.getCol(1)
lowData = rantable.getCol(2)
openData = rantable.getCol(3)
closeData = rantable.getCol(4)
volData = rantable.getCol(5)
# Create a FinanceChart object of width 640 pixels
c = FinanceChart(640)
# Add a title to the chart
c.addTitle("Finance Chart Demonstration")
# Set the data into the finance chart object
c.setData(timeStamps, highData, lowData, openData, closeData, volData, extraDays)
# Add a slow stochastic chart (75 pixels high) with %K = 14 and %D = 3
c.addSlowStochastic(75, 14, 3, 0x006060, 0x606000)
# Add the main chart with 240 pixels in height
c.addMainChart(240)
# Add a 10 period simple moving average to the main chart, using brown color
c.addSimpleMovingAvg(10, 0x663300)
# Add a 20 period simple moving average to the main chart, using purple color
c.addSimpleMovingAvg(20, 0x9900ff)
# Add candlestick symbols to the main chart, using green/red for up/down days
c.addCandleStick(0x00ff00, 0xff0000)
# Add 20 days donchian channel to the main chart, using light blue (9999ff) as the border and
# semi-transparent blue (c06666ff) as the fill color
c.addDonchianChannel(20, 0x9999ff, 0xc06666ff)
# Add a 75 pixels volume bars sub-chart to the bottom of the main chart, using green/red/grey for
# up/down/flat days
c.addVolBars(75, 0x99ff99, 0xff9999, 0x808080)
# Append a MACD(26, 12) indicator chart (75 pixels high) after the main chart, using 9 days for
# computing divergence.
c.addMACD(75, 26, 12, 9, 0x0000ff, 0xff00ff, 0x008000)
# Output the chart
print("Content-type: image/png\n")
binaryPrint(c.makeChart2(PNG))
| [
"[email protected]"
] | |
25c724ee779ec1d3fdd96581b62e411fcba9cf2a | 9dab41a71bf19a9ad17ee3e9f77c0f58aebd1d6d | /python/uline/uline/uline/handlers/app/merchant/statistics/transactionStatisticsHandler.py | 92e2b4165084bb877b87f18d86d5ef2703436b58 | [] | no_license | apollowesley/Demo | f0ef8ec6c4ceb0aec76771da8dd9a62fb579eac8 | 471c4af95d3a7222d6933afc571a8e52e8fe4aee | refs/heads/master | 2021-02-15T04:01:51.590697 | 2018-01-29T01:44:29 | 2018-01-29T01:44:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,042 | py | # -*- coding: utf-8 -*-
from __future__ import division
from collections import defaultdict
from datetime import datetime, timedelta
from tornado import gen
from tornado.web import (
authenticated
)
from uline.handlers.baseHandlers import MchAdminHandler
from uline.public import log
from uline.public import common
from .form import TotalStatisticsSearch
from uline.public.permit import check_permission
class TransactionStatisticsHandler(MchAdminHandler):
# executor = ThreadPoolExecutor(8)
@authenticated
@check_permission
def prepare(self):
form = TotalStatisticsSearch(self)
if not form.validate():
f_rsp = common.f_rsp(code=406, msg='fail')
self.finish(f_rsp)
return
self.query_date = 1 if not form.query_date.data else form.query_date.data
self.create_at_start = form.create_at_start.data
self.create_at_end = form.create_at_end.data
self.order_by = 'pay_time' if not form.order_by.data else form.order_by.data
self.pageindex = int(self.get_argument("p", 1))
# 模块上日期
self.last_create_at_start = datetime.today().strftime("%Y-%m-%d")
self.last_create_at_end = (
datetime.now() + timedelta(1)).strftime("%Y-%m-%d")
self.create_at_start, \
self.create_at_end, \
self.create_at_start_search, \
self.create_at_end_search = common.common_date_deal(self.query_date,
self.create_at_start, self.create_at_end)
@gen.coroutine
def get(self):
data = yield self.do_execute()
self.render('merchant/statistics/transactionStatistics.html', data=data, query_date=int(
self.query_date), order_by_form=self.order_by,
create_at_start=self.create_at_start, create_at_end=self.create_at_end)
@gen.coroutine
def post(self):
data = yield self.do_execute()
self.render('merchant/statistics/transactionStatistics.html', data=data, query_date=int(
self.query_date), order_by_form=self.order_by,
create_at_start=self.create_at_start, create_at_end=self.create_at_end)
@gen.coroutine
def do_execute(self):
last_day_count, total_count, total_count_search, search_count_details, charts = [0] * 6, [0] * 6, [
0] * 6, [], []
last_day_count_fields = ['day_tx_count_last', 'day_tx_amount_last', 'day_refund_count_last',
'day_refund_amount_last', 'day_tx_net_amout_last', 'day_profit_amount_last']
total_count_fields = ['day_tx_count_total', 'day_tx_amount_total', 'day_refund_count_total',
'day_refund_amount_total', 'day_tx_net_amout_total', 'day_profit_amount_total']
total_count_search_fields = ['day_tx_count_total', 'day_tx_amount_total', 'day_refund_count_total',
'day_refund_amount_total', 'day_tx_net_amout_total', 'day_profit_amount_total']
with self.db.get_db() as cur:
try:
last_day_count = yield self.get_pay_count(cur, self.last_create_at_start, self.last_create_at_end)
total_count = yield self.get_pay_count(cursor=cur, create_at_start=None,
create_at_end=self.last_create_at_end)
total_query_start = datetime.strptime(self.create_at_start_search, '%Y-%m-%d %H:%M:%S') + timedelta(
1) if int(self.query_date) == 2 else self.create_at_start_search
total_count_search = yield self.get_pay_count(cur, total_query_start,
self.create_at_end_search)
search_count_details = yield self.get_pay_count_detail(cur, self.create_at_start_search,
self.create_at_end_search,
(self.pageindex - 1) *
10, self.query_date,
page=True)
chart_show_details = yield self.get_pay_count_detail(cur, self.create_at_start_search,
self.create_at_end_search, 0, self.query_date,
page=False)
except Exception as err:
cur.connection.rollback()
log.exception.exception(err)
else:
cur.connection.commit()
details, total_num = search_count_details
if int(self.query_date) == 2:
months = len(common.get_mon_seq(
self.create_at_start, self.create_at_end))
if len(chart_show_details[0]) > months:
del chart_show_details[0][0]
if len(details) > months:
del details[0]
details = common.append_start_end(details, self.query_date)
navigate_html = self.get_navigate_html(total_num)
last_day_count_data = dict(zip(last_day_count_fields, last_day_count))
total_count_data = dict(zip(total_count_fields, total_count))
total_count_search_data = dict(
zip(total_count_search_fields, total_count_search))
date_range_key = common.get_date_range(self.create_at_start, self.create_at_end) if int(
self.query_date) == 1 else common.get_mon_seq(self.create_at_start, self.create_at_end)
for index in range(1, 7):
detail = yield common.deal_search_count_charts(index, date_range_key, chart_show_details[0])
charts.append(detail)
data = dict(last_day_count_data=last_day_count_data, total_count_data=total_count_data,
total_count_search_data=total_count_search_data, details=details, charts=charts,
total_num=total_num, navigate_html=navigate_html)
raise gen.Return(data)
@gen.coroutine
def get_pay_count(self, cursor, create_at_start=None, create_at_end=None):
'''
:param cursor:
:param create_at_start:
:param create_at_end:
:return:
'''
query = """select
sum(day_tx_count),
round(sum(day_tx_amount)/100, 2),
sum(day_refund_count),
abs(round(sum(day_refund_amount)/100, 2)),
round(sum(day_tx_net_amout)/100, 2),
round(sum(day_profit_amount)/100, 2)
from mch_daily_balance_info
where mch_id = %(mch_id)s
and (to_char(need_pay_time, 'YYYY-MM-DD')::timestamp between %(create_at_start)s::timestamp and %(create_at_end)s::timestamp
or %(create_at_start)s is null or %(create_at_end)s is null)"""
cursor.execute(
query,
{
'create_at_start': create_at_start,
'create_at_end': create_at_end,
'mch_id': self.current_user
}
)
ret = cursor.fetchone()
raise gen.Return(ret)
@gen.coroutine
def get_pay_count_detail(self, cursor, create_at_start, create_at_end, offset, date_switch, page=False):
'''
:param cursor:
:param create_at_start:
:param create_at_end:
:return:
'''
query = """select
{switch}
sum(day_tx_count) as day_tx_count,
round(sum(day_tx_amount), 2) as day_tx_amount,
sum(day_refund_count) as day_refund_count,
abs(round(sum(day_refund_amount), 2)) as day_refund_amount,
round(sum(day_tx_net_amout), 2) as day_tx_net_amout,
abs(round(sum(day_profit_amount), 2)) as day_profit_amount,
COUNT(*) over () as total
from mch_daily_balance_info
where to_char({date_query}, 'YYYY-MM-DD')::timestamp between %(create_at_start)s::timestamp and %(create_at_end)s::timestamp
and mch_id=%(mch_id)s
GROUP BY pay_time
"""
if page:
query += 'order by {} DESC '\
'OFFSET %(offset)s ROWS FETCH NEXT 10 ROWS ONLY;'.format(
self.order_by)
else:
query += 'order by {} DESC;'.format(self.order_by)
switch = " to_char(need_pay_time - INTERVAL '1 day', 'YYYY-MM-DD') as pay_time," if int(
date_switch) == 1 else "to_char(need_pay_time - INTERVAL '1 day','YYYY-MM') as pay_time,"
date_query = "need_pay_time" if int(
date_switch) == 1 else "need_pay_time - INTERVAL '1 day'"
query = query.format(switch=switch, date_query=date_query)
con_dict = {'mch_id': self.current_user, 'create_at_start': create_at_start,
'create_at_end': create_at_end}
if page:
con_dict.update({'offset': offset})
cursor.execute(query, con_dict)
ret = cursor.fetchall()
ret = [
(
d[0],
int(d[1]),
d[2] / 100,
int(d[3]),
abs(d[4]) / 100,
d[5] / 100,
d[6] / 100, int(d[7])) for d in ret
] if ret else []
if ret:
raise gen.Return([ret, ret[0][-1]])
raise gen.Return([ret, 0])
# @gen.coroutine
# def deal_search_count_details(self, search_count_details):
# date_range_default = defaultdict(list)
#
# for (
# pay_start_time,
# day_tx_count,
# day_tx_amount,
# day_refund_count,
# day_refund_amount,
# day_tx_net_amout,
# day_profit_amount) in search_count_details:
# if not date_range_default[pay_start_time]:
# date_range_default[pay_start_time].extend(
# [
# pay_start_time,
# day_tx_count,
# day_tx_amount,
# day_refund_count,
# day_refund_amount,
# day_tx_net_amout,
# day_profit_amount
# ]
# )
# else:
# date_range_default[pay_start_time][1] += day_tx_count
# date_range_default[pay_start_time][2] += day_tx_amount
# date_range_default[pay_start_time][3] += day_refund_count
# date_range_default[pay_start_time][4] += day_refund_amount
# date_range_default[pay_start_time][5] += day_tx_net_amout
# date_range_default[pay_start_time][6] += day_profit_amount
# details = date_range_default.values()
# raise gen.Return(sorted(details, reverse=True))
| [
"[email protected]"
] | |
49982cbda6186d5804468863bfc7a8d00d46ef96 | cac155c4a39b902213fe9efe39dbe761afb00a40 | /回溯法/leetcode/排列问题/leetcode_46_permute.py | 068c120de650f947bde4374dd12e8327b69c7a1c | [] | no_license | songyingxin/python-algorithm | 51c8d2fc785ba5bc5c3c98a17dce33cbced8cb99 | 4b1bebb7d8eb22516119acc921dfc69a72420722 | refs/heads/master | 2022-06-29T05:04:14.300542 | 2022-05-22T10:11:34 | 2022-05-22T10:11:34 | 164,998,626 | 331 | 72 | null | null | null | null | UTF-8 | Python | false | false | 696 | py |
# permute(nums[0...n-1]) = (取出一个数字) + permute(nums[0...n-1] - 这个数字)
class Solution:
def permute(self, nums: List[int]) -> List[List[int]]:
def backtracking(nums, item):
if not nums:
result.append(item)
return
for index in range(len(nums)):
tmp = nums[:index] + nums[index+1:]
tmp_item = item[:]
tmp_item.append(nums[index])
backtracking(tmp, tmp_item)
result = []
backtracking(nums, [])
return result
if __name__ == "__main__":
nums = [1,2,3]
print(Solution().permute(nums))
| [
"[email protected]"
] | |
6f83f7dc50cbc2028bf2c6b1e578b94c2a593cb0 | d2c4934325f5ddd567963e7bd2bdc0673f92bc40 | /tests/artificial/transf_Quantization/trend_Lag1Trend/cycle_12/ar_/test_artificial_1024_Quantization_Lag1Trend_12__100.py | 009dead988ececca3e907ac0f1dc2250b82392ff | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jmabry/pyaf | 797acdd585842474ff4ae1d9db5606877252d9b8 | afbc15a851a2445a7824bf255af612dc429265af | refs/heads/master | 2020-03-20T02:14:12.597970 | 2018-12-17T22:08:11 | 2018-12-17T22:08:11 | 137,104,552 | 0 | 0 | BSD-3-Clause | 2018-12-17T22:08:12 | 2018-06-12T17:15:43 | Python | UTF-8 | Python | false | false | 275 | py | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 12, transform = "Quantization", sigma = 0.0, exog_count = 100, ar_order = 0); | [
"[email protected]"
] | |
91435fe0101353d14673c598094ce8d75d7b6780 | 26c019f7dadceaf773cd292d7364582bc2a278d2 | /user_app/tests/interactors/raw_inputs.py | 0b9d1fcc1eb22362432905c8446daa399ba983be | [] | no_license | DilLip-Chowdary-Codes/Backend_Mini_Projects | 289d5213a1c62d5b2ab26397e0d684632b139ad1 | f69dc6e9de4d621b782b703f2aa41cd126d8a58b | refs/heads/master | 2022-11-12T02:09:36.600636 | 2020-07-09T15:05:21 | 2020-07-09T15:05:21 | 272,417,611 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,638 | py | from user_app.dtos\
import TaskDetailsDto, ProjectDto,\
StateDto, UserDto, ProjectDetailsDto,\
TaskDto, TransitionDetailsDto, ChecklistDetailsDto,\
UpdateTransitionInputDto, ChecklistStatusDto
project_data = {
"name": "projectManagement",
"description": "it's a blaw blaw blaw blaw blaw blaw ",
"workflow_id": 1,
"project_type": "Classic Software",
"developers": [1]
}
task_data = {
"project_id": 1,
"issue_type": "Enhancement",
"title": "Optimizing DB",
"description": "string",
"state_id": 1
}
user_dto = UserDto(
user_id=1,
username="username_1",
profile_pic="http://www.google.com",
phone_no="8739835635",
is_admin=True
)
developer_dto = UserDto(
user_id=2,
username="username_2",
profile_pic="http://www.google.com",
phone_no="8739835635",
is_admin=False
)
state_dto = StateDto(
name="In Progress")
state_2_dto = StateDto(
name="New State")
project_dto = ProjectDto(
name="projectManagement",
description="it's a blaw blaw blaw blaw blaw blaw ",
workflow_id=1,
project_type="Classic Software",
developers=[1]
)
project_details_dto = ProjectDetailsDto(
project_id=1,
name="projectManagement",
description="it's a blaw blaw blaw blaw blaw blaw ",
workflow="",
project_type="Classic Software",
created_by=user_dto,
created_at="2020-05-28 10:06:23",
developers=[developer_dto]
)
task_dto = TaskDto(
project_id=1,
issue_type="Enhancement",
title="Optimizing DB",
description="string",
state_id=1)
task_details_dto = TaskDetailsDto(
task_id=1,
project=project_details_dto,
issue_type="Enhancement",
title="Optimizing DB",
assignee=user_dto,
description="string",
state=state_dto
)
tasks_dtos = [task_dto]
tasks_details_dtos = [task_details_dto]
#task_transition
checklist_input_dict= {
"checklist_id": 1,
"is_checked": True
}
checklist_input_dict_2 = {
"checklist_id": 2,
"is_checked": False
}
checklist_input_dicts_list = [
checklist_input_dict,
checklist_input_dict_2]
checklist_input_dict_unsatisfied_1 = {
"checklist_id": 1,
"is_checked": False
}
checklist_input_dicts_list_unsatisfied_mandatory_fields = [
checklist_input_dict_unsatisfied_1,
checklist_input_dict_2
]
checklist_status_dto = ChecklistStatusDto(
checklist_id=checklist_input_dict['checklist_id'],
is_checked=checklist_input_dict['is_checked'])
checklist_status_dto_2 = ChecklistStatusDto(
checklist_id=checklist_input_dict_2['checklist_id'],
is_checked=checklist_input_dict_2['is_checked'])
checklist_status_dtos_list = [
checklist_status_dto, checklist_status_dto_2
]
checklist_status_dto_unsatisfied = ChecklistStatusDto(
checklist_id=checklist_input_dict_unsatisfied_1['checklist_id'],
is_checked=checklist_input_dict_unsatisfied_1['is_checked'])
checklist_status_dtos_list_unsatisfied_mandatory_fields = [
checklist_status_dto_unsatisfied,
checklist_status_dto_2
]
update_task_state_input_data = {
"user_id": 1,
"project_id": 1,
"task_id": 1,
"from_state_id": 1,
"to_state_id": 2,
"checklist": checklist_input_dicts_list
}
update_task_state_input_data_with_unchecked_mandatory_checklist = {
"user_id": 1,
"project_id": 1,
"task_id": 1,
"from_state_id": 1,
"to_state_id": 2,
"checklist": checklist_input_dicts_list_unsatisfied_mandatory_fields
}
transition_details_query_dict = {
"project_id":1,
"task_id":1,
"to_state_id":2
}
task_state_data = {
"user_id": 1,
"project_id": 1,
"task_id": 1
}
from_state_id = task_dto.state_id
update_task_state_query_dto = UpdateTransitionInputDto(
project_id=update_task_state_input_data['project_id'],
task_id=update_task_state_input_data['task_id'],
from_state_id=from_state_id,
to_state_id=update_task_state_input_data['to_state_id'],
checklist=checklist_status_dtos_list)
update_task_state_query_dto_with_unchecked_mandatory_checklist\
= UpdateTransitionInputDto(
project_id=\
update_task_state_input_data_with_unchecked_mandatory_checklist[
'project_id'],
task_id=\
update_task_state_input_data_with_unchecked_mandatory_checklist[
'task_id'],
from_state_id=from_state_id,
to_state_id=\
update_task_state_input_data_with_unchecked_mandatory_checklist[
'to_state_id'],
checklist=checklist_status_dtos_list_unsatisfied_mandatory_fields
)
| [
"[email protected]"
] | |
4872c9f499225b41609f8f51dd238b18e51fe7d8 | e079006d43a545baf2e63ef20dfefd6e778f42c2 | /Stark/urls.py | d2264c2278b9bed2c799e1f8f8b6ca59594e745d | [] | no_license | wailaifeike/myAdmin | 34a41f1f8c7b04defa92d1ed3872888ff5295445 | 8f79f8232767bae73d0fd0c326232ca33203c7e2 | refs/heads/master | 2020-04-07T11:10:29.225049 | 2018-11-20T01:55:46 | 2018-11-20T01:55:46 | 158,315,189 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 115 | py | from django.conf.urls import url,include
from Stark import views
urlpatterns = [
url(r'',views.acc_login)
] | [
"[email protected]"
] | |
8bde39144d8acee2bd36c7ff65890ffec18fda58 | f5f781ef988d4fa2868c923597a132018eb14041 | /build/ROBOTIS-OP3-msgs/op3_offset_tuner_msgs/cmake/op3_offset_tuner_msgs-genmsg-context.py | 9e14c02a4f494952599bc5018c54c563cbb5ddc4 | [] | no_license | greenechang/christmann_ws_2019fira | 701374a30059ee63faf62cfc8dae8ea783f6c078 | a1ba2846fe1326e54366627d8812fa1bf90c70e1 | refs/heads/master | 2022-11-15T20:55:15.891128 | 2020-07-15T09:52:17 | 2020-07-15T09:52:17 | 279,816,942 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,063 | py | # generated from genmsg/cmake/pkg-genmsg.context.in
messages_str = "/home/robotis/christmann_ws/src/ROBOTIS-OP3-msgs/op3_offset_tuner_msgs/msg/JointOffsetData.msg;/home/robotis/christmann_ws/src/ROBOTIS-OP3-msgs/op3_offset_tuner_msgs/msg/JointOffsetPositionData.msg;/home/robotis/christmann_ws/src/ROBOTIS-OP3-msgs/op3_offset_tuner_msgs/msg/JointTorqueOnOff.msg;/home/robotis/christmann_ws/src/ROBOTIS-OP3-msgs/op3_offset_tuner_msgs/msg/JointTorqueOnOffArray.msg"
services_str = "/home/robotis/christmann_ws/src/ROBOTIS-OP3-msgs/op3_offset_tuner_msgs/srv/GetPresentJointOffsetData.srv"
pkg_name = "op3_offset_tuner_msgs"
dependencies_str = "std_msgs"
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
dep_include_paths_str = "op3_offset_tuner_msgs;/home/robotis/christmann_ws/src/ROBOTIS-OP3-msgs/op3_offset_tuner_msgs/msg;std_msgs;/opt/ros/kinetic/share/std_msgs/cmake/../msg"
PYTHON_EXECUTABLE = "/usr/bin/python"
package_has_static_sources = '' == 'TRUE'
genmsg_check_deps_script = "/opt/ros/kinetic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
| [
"[email protected]"
] | |
92476a49a95a1903850e0fff124bebc181d4136e | 32f61223ae8818f64922d69dc8d279428cd568e3 | /AlphaTwirl/AlphaTwirl/Loop/NullCollector.py | 212e63c87a8b257d129830e32131f74f42176eca | [
"BSD-3-Clause"
] | permissive | eshwen/cutflowirl | 959fdead7cc1f58e77e68074a9ee491c3259c6d6 | e20372dc3ce276c1db4e684b8e9f1e719b9e8e7d | refs/heads/master | 2020-04-05T11:20:49.000726 | 2017-11-23T16:15:10 | 2017-11-23T16:15:10 | 81,349,000 | 0 | 0 | null | 2017-11-23T16:15:11 | 2017-02-08T16:15:59 | Python | UTF-8 | Python | false | false | 382 | py | # Tai Sakuma <[email protected]>
##__________________________________________________________________||
class NullCollector(object):
def __repr__(self):
return '{}()'.format(self.__class__.__name__)
def addReader(self, datasetName, reader):
pass
def collect(self):
pass
##__________________________________________________________________||
| [
"[email protected]"
] | |
ddb052d3917074619f88e5c250e223b616556c1b | 906c6abf6721303449a86c842a97193e86f1e88a | /sm/backup/NTCIR-Evaluation/src/GenerateXml.py | 30fa4354df107cf7417f11691a505ef644d9dd60 | [] | no_license | luochengleo/thuirwork | a5b5bedaa59dd94fde6c58d6c2ddba75fb99d374 | 2bf230949757401c15dee50249a0fa8aded595ad | refs/heads/master | 2020-04-13T12:49:03.752647 | 2014-08-31T08:37:52 | 2014-08-31T08:37:52 | 22,720,301 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,545 | py | #coding=utf8
import xml.etree.ElementTree as ET
from xml.etree.ElementTree import Element
from collections import defaultdict
import sys,csv
import codecs
from bs4 import BeautifulSoup
reload(sys)
sys.setdefaultencoding("utf8")
def loadcsv(filename):
return csv.reader(open(filename))
id2topic = dict()
for l in open('../data/temp/IMine.Query.txt').readlines():
id,topic = l.replace(codecs.BOM_UTF8,'').strip().split('\t')
id2topic[id] = topic
print id,topic
def evaid():
rtr = []
for i in range(1,34,1):
if i <10:
rtr.append('000'+str(i))
else:
rtr.append('00'+str(i))
return rtr
sls2id = dict()
for l in open('../data/temp/slsposs.txt').readlines():
segs = l.strip().split('\t')
sls2id[segs[1]] = segs[0]
id2fls2sls2queries = defaultdict(lambda:defaultdict(lambda:defaultdict(lambda:set())))
for l in loadcsv('../data/csv/task6.csv'):
query = l[1]
fls = l[3]
sls = l[4]
if sls in sls2id:
if query != '' and fls != '' and sls != '':
id = sls2id[sls]
id2fls2sls2queries[id][fls][sls].add(query)
id2fls = defaultdict(lambda:list())
id2flsposs = defaultdict(lambda:dict())
for l in open('../data/temp/flsposs.txt'):
segs = l.strip().split('\t')
id = segs[0]
fls = segs[1]
poss = float(segs[2])
id2fls[id].append(fls)
id2flsposs[id][fls] = poss
id2sls = defaultdict(lambda:list())
id2slsposs = defaultdict(lambda:dict())
for l in open('../data/temp/slsposs.txt'):
segs = l.strip().split('\t')
id = segs[0]
sls = segs[1]
poss = float(segs[2])
id2sls[id].append(sls)
id2slsposs[id][sls] = poss
for l in open('../data/temp/flsposs.txt'):
segs = ''
root = ET.Element('root')
for id in evaid():
topic = id2topic[id]
topicnode = ET.Element('topic',{'id':id,'content':topic})
for fls in id2fls[id]:
print id,fls
flsnode = ET.Element('fls',{'content':fls,'poss':str(id2flsposs[id][fls])})
for sls in id2sls[id]:
if sls in id2fls2sls2queries[id][fls]:
slsnode = ET.Element('sls',{'content':sls,'poss':str(id2slsposs[id][sls])})
for q in id2fls2sls2queries[id][fls][sls]:
expnode = ET.Element('example')
expnode.text = q
slsnode.append(expnode)
flsnode.append(slsnode)
topicnode.append(flsnode)
root.append(topicnode)
tree = ET.ElementTree(root)
tree.write('../data/test.xml','utf8')
| [
"[email protected]"
] | |
2a8a2fea5ef6b27e5ad95edd93fb19dddb4b601a | f445450ac693b466ca20b42f1ac82071d32dd991 | /generated_tempdir_2019_09_15_163300/generated_part005222.py | 554f9ec93f4279524a546fb6081ae0f1d20c7b74 | [] | no_license | Upabjojr/rubi_generated | 76e43cbafe70b4e1516fb761cabd9e5257691374 | cd35e9e51722b04fb159ada3d5811d62a423e429 | refs/heads/master | 2020-07-25T17:26:19.227918 | 2019-09-15T15:41:48 | 2019-09-15T15:41:48 | 208,357,412 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,004 | py | from sympy.abc import *
from matchpy.matching.many_to_one import CommutativeMatcher
from matchpy import *
from matchpy.utils import VariableWithCount
from collections import deque
from multiset import Multiset
from sympy.integrals.rubi.constraints import *
from sympy.integrals.rubi.utility_function import *
from sympy.integrals.rubi.rules.miscellaneous_integration import *
from sympy import *
class CommutativeMatcher68381(CommutativeMatcher):
_instance = None
patterns = {
0: (0, Multiset({0: 1}), [
(VariableWithCount('i2.2.1.2.2.0', 1, 1, S(0)), Add)
]),
1: (1, Multiset({1: 1}), [
(VariableWithCount('i2.4.0', 1, 1, S(0)), Add)
])
}
subjects = {}
subjects_by_id = {}
bipartite = BipartiteGraph()
associative = Add
max_optional_count = 1
anonymous_patterns = set()
def __init__(self):
self.add_subject(None)
@staticmethod
def get():
if CommutativeMatcher68381._instance is None:
CommutativeMatcher68381._instance = CommutativeMatcher68381()
return CommutativeMatcher68381._instance
@staticmethod
def get_match_iter(subject):
subjects = deque([subject]) if subject is not None else deque()
subst0 = Substitution()
# State 68380
subst1 = Substitution(subst0)
try:
subst1.try_add_variable('i2.2.1.2.2.1.0_1', S(1))
except ValueError:
pass
else:
pass
# State 68382
if len(subjects) >= 1:
tmp2 = subjects.popleft()
subst2 = Substitution(subst1)
try:
subst2.try_add_variable('i2.2.1.2.2.1.0', tmp2)
except ValueError:
pass
else:
pass
# State 68383
if len(subjects) == 0:
pass
# 0: x*d
yield 0, subst2
subjects.appendleft(tmp2)
subst1 = Substitution(subst0)
try:
subst1.try_add_variable('i2.4.1.0_1', S(1))
except ValueError:
pass
else:
pass
# State 68600
if len(subjects) >= 1:
tmp5 = subjects.popleft()
subst2 = Substitution(subst1)
try:
subst2.try_add_variable('i2.4.1.0', tmp5)
except ValueError:
pass
else:
pass
# State 68601
if len(subjects) == 0:
pass
# 1: x*f
yield 1, subst2
subjects.appendleft(tmp5)
if len(subjects) >= 1 and isinstance(subjects[0], Mul):
tmp7 = subjects.popleft()
associative1 = tmp7
associative_type1 = type(tmp7)
subjects8 = deque(tmp7._args)
matcher = CommutativeMatcher68385.get()
tmp9 = subjects8
subjects8 = []
for s in tmp9:
matcher.add_subject(s)
for pattern_index, subst1 in matcher.match(tmp9, subst0):
pass
if pattern_index == 0:
pass
# State 68386
if len(subjects) == 0:
pass
# 0: x*d
yield 0, subst1
if pattern_index == 1:
pass
# State 68602
if len(subjects) == 0:
pass
# 1: x*f
yield 1, subst1
subjects.appendleft(tmp7)
return
yield
from .generated_part005223 import *
from matchpy.matching.many_to_one import CommutativeMatcher
from collections import deque
from matchpy.utils import VariableWithCount
from multiset import Multiset | [
"[email protected]"
] | |
e13db41af418a2896e05121c9e2d591d24eaa882 | 9b6b3f4b30e9bd8a821d8df16bd71e62b9c6eb98 | /day2/data_structs/conversion_4.py | eedfdf0af2d6d721cb076e44d17b34e8eb93b27a | [] | no_license | shobhit-nigam/snape_mar | b7f2155cfcd83482230c339fe45f9ea851061318 | b7b33a767cc00d35a22e40c940b4331e4898c8d5 | refs/heads/main | 2023-03-25T05:44:21.244078 | 2021-03-26T05:27:28 | 2021-03-26T05:27:28 | 350,555,721 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 330 | py | # sorted
avengers = {'captain':'shield', 'ironman':'suit', 'hulk':['smash', 'science'], 'black widow':'energy'}
xmen = ['mystique', 'magneto', 'wolverine']
dc = ('wonder woman', 'batman', 'flash')
stra = "hello"
print(list(stra))
print(sorted(stra))
print(sorted(dc))
print(sorted(xmen))
print(sorted(avengers))
| [
"[email protected]"
] | |
f20916f8c9c13c2a31dbcb18a07523b3185ae3d5 | aca8fc8c2a2de84e94f120e9ca8b12d152bc7cfa | /tests/test_fields_email.py | b9787d20611a2eea1172b40efdcc788bb790da58 | [] | no_license | habibutsu/yadm | de30b364edd40917b2b25457f76cec908f2ffd3d | b3b9f2fdd5987c718b9db600fd7881630bfef944 | refs/heads/master | 2022-12-14T22:14:57.190430 | 2019-03-20T15:52:13 | 2019-04-04T15:52:29 | 296,621,139 | 0 | 0 | null | 2020-09-18T12:55:49 | 2020-09-18T12:55:48 | null | UTF-8 | Python | false | false | 417 | py | import pytest
from yadm.documents import Document
from yadm.fields.email import EmailField, InvalidEmail
class Doc(Document):
e = EmailField()
def test_ok():
doc = Doc()
doc.e = '[email protected]'
assert doc.e == '[email protected]'
@pytest.mark.parametrize('bad_email', ['EmA.iL', 'E@mA@iL', 'EmAiL@'])
def test_error(bad_email):
doc = Doc()
with pytest.raises(InvalidEmail):
doc.e = bad_email
| [
"[email protected]"
] | |
c8e26e30e21138ec04c30db6579b6bd98a620898 | 55de20ff6a7b3e07cffae42d2d9b24178f65daf3 | /dockerhub_show_tags.py | 8c0568a7f12d0b05a91faf4c381b10a296ff8bb3 | [] | no_license | srjayep/pytools | 35f803f1adcc1e93f489475ee12c72ec10161649 | c96b752c7d8679e7dde1657914fa56bd9b4f2cfd | refs/heads/master | 2020-05-29T08:51:34.003012 | 2016-10-05T15:09:05 | 2016-10-05T15:09:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,117 | py | #!/usr/bin/env python
# vim:ts=4:sts=4:sw=4:et
#
# Author: Hari Sekhon
# Date: 2016-05-10 11:26:49 +0100 (Tue, 10 May 2016)
#
# https://github.com/harisekhon/pytools
#
# License: see accompanying Hari Sekhon LICENSE file
#
# If you're using my code you're welcome to connect with me on LinkedIn
# and optionally send me feedback to help improve this or other code I publish
#
# https://www.linkedin.com/in/harisekhon
#
"""
Tool to show Docker tags for one or more DockerHub repos
Written for convenience as Docker CLI doesn't currently support this:
See https://github.com/docker/docker/issues/17238
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
#from __future__ import unicode_literals
import json
import logging
import os
import sys
import traceback
import urllib
try:
import requests
except ImportError:
print(traceback.format_exc(), end='')
sys.exit(4)
srcdir = os.path.abspath(os.path.dirname(__file__))
libdir = os.path.join(srcdir, 'pylib')
sys.path.append(libdir)
try:
# pylint: disable=wrong-import-position
from harisekhon.utils import log, die, prog, isJson, jsonpp
from harisekhon import CLI
except ImportError as _:
print(traceback.format_exc(), end='')
sys.exit(4)
__author__ = 'Hari Sekhon'
__version__ = '0.3'
class DockerHubTags(CLI):
    """List the tags published for one or more DockerHub repositories."""

    def __init__(self):
        # Python 2.x
        super(DockerHubTags, self).__init__()
        # Python 3.x
        # super().__init__()
        # The option parser is private to the CLI base class; reach through
        # the name-mangled attribute to customize the usage banner.
        self._CLI__parser.usage = '{0} [options] repo1 repo2 ...'.format(prog)
        self.quiet = False
        self.timeout_default = 30

    def add_options(self):
        """Register the --quiet flag on the base-class option parser."""
        self.add_opt('-q', '--quiet', action='store_true', default=False,
                     help='Output only the tags, one per line (useful for shell tricks)')

    def run(self):
        """Entry point: print the tag list for every repo given as an arg."""
        if not self.args:
            self.usage('no repos given as args')
        self.quiet = self.get_opt('quiet')
        if not self.quiet:
            print('\nDockerHub\n')
        for arg in self.args:
            self.print_tags(arg)

    def print_tags(self, repo):
        """Print one repo's tags; --quiet suppresses headers and indentation."""
        if not self.quiet:
            print('repo: {0}'.format(repo))
            print('tags: ', end='')
        sys.stdout.flush()
        indent = ' '
        if self.quiet:
            indent = ''
        print('\n{0}'.format(indent).join(self.get_tags(repo)))
        if not self.quiet:
            print()

    @staticmethod
    def get_tags(repo):
        """Query the DockerHub v2 API and return the repo's tags.

        'latest' is moved to the front when present; the rest are sorted.
        Dies with a diagnostic on network, HTTP or parse errors.
        """
        # urllib.quote_plus moved to urllib.parse in Python 3; import the
        # right one locally so the script runs on both major versions.
        try:
            from urllib import quote_plus        # Python 2
        except ImportError:
            from urllib.parse import quote_plus  # Python 3
        namespace = 'library'
        if '/' in repo:
            # maxsplit=1: 'user/repo' splits into exactly two parts.
            # (The previous maxsplit=2 made the unpack raise ValueError
            # on any repo argument containing a second slash.)
            (namespace, repo) = repo.split('/', 1)
        url = 'https://registry.hub.docker.com/v2/repositories/{0}/{1}/tags/'\
              .format(quote_plus(namespace), quote_plus(repo))
        # lazy %-args, consistent with the other log.debug calls below
        log.debug('GET %s', url)
        try:
            verify = True
            # workaround for Travis CI and older pythons - we're not exchanging secret data so this is ok
            #if os.getenv('TRAVIS'):
            #    verify = False
            req = requests.get(url, verify=verify)
        except requests.exceptions.RequestException as _:
            die(_)
        log.debug("response: %s %s", req.status_code, req.reason)
        log.debug("content:\n%s\n%s\n%s", '='*80, req.content.strip(), '='*80)
        if req.status_code != 200:
            die("%s %s" % (req.status_code, req.reason))
        if not isJson(req.content):
            die('invalid non-JSON response from DockerHub!')
        if log.isEnabledFor(logging.DEBUG):
            print(jsonpp(req.content))
            print('='*80)
        tag_list = []
        try:
            j = json.loads(req.content)
            tag_list = [_['name'] for _ in j['results']]
        except KeyError as _:
            die('failed to parse output from DockerHub (format may have changed?): {0}'.format(_))
        tag_list.sort()
        # put latest to the top of the list
        try:
            tag_list.insert(0, tag_list.pop(tag_list.index('latest')))
        except ValueError:
            pass
        return tag_list
if __name__ == '__main__':
    # Delegate argument parsing and execution to the harisekhon CLI
    # framework's main() driver.
    DockerHubTags().main()
| [
"[email protected]"
] | |
a97f507d20a0aa26e54224831227833026e7eac6 | b7fab13642988c0e6535fb75ef6cb3548671d338 | /tools/ydk-py-master/cisco-ios-xr/ydk/models/cisco_ios_xr/openconfig_vlan.py | ef926795da3091805a43802d719560c409548218 | [
"Apache-2.0"
] | permissive | juancsosap/yangtraining | 6ad1b8cf89ecdebeef094e4238d1ee95f8eb0824 | 09d8bcc3827575a45cb8d5d27186042bf13ea451 | refs/heads/master | 2022-08-05T01:59:22.007845 | 2019-08-01T15:53:08 | 2019-08-01T15:53:08 | 200,079,665 | 0 | 1 | null | 2021-12-13T20:06:17 | 2019-08-01T15:54:15 | Python | UTF-8 | Python | false | false | 41,152 | py | """ openconfig_vlan
This module defines configuration and state variables for VLANs,
in addition to VLAN parameters associated with interfaces
"""
from ydk.entity_utils import get_relative_entity_path as _get_relative_entity_path
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YPYError, YPYModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class Vlans(Entity):
"""
Container for VLAN configuration and state
variables
.. attribute:: vlan
Configured VLANs keyed by id
**type**\: list of :py:class:`Vlan <ydk.models.openconfig.openconfig_vlan.Vlans.Vlan>`
"""
_prefix = 'oc-vlan'
_revision = '2016-05-26'
def __init__(self):
super(Vlans, self).__init__()
self._top_entity = None
self.yang_name = "vlans"
self.yang_parent_name = "openconfig-vlan"
self.vlan = YList(self)
def __setattr__(self, name, value):
self._check_monkey_patching_error(name, value)
with _handle_type_error():
if name in self.__dict__ and isinstance(self.__dict__[name], YList):
raise YPYModelError("Attempt to assign value of '{}' to YList ldata. "
"Please use list append or extend method."
.format(value))
if isinstance(value, Enum.YLeaf):
value = value.name
if name in () and name in self.__dict__:
if isinstance(value, YLeaf):
self.__dict__[name].set(value.get())
elif isinstance(value, YLeafList):
super(Vlans, self).__setattr__(name, value)
else:
self.__dict__[name].set(value)
else:
if hasattr(value, "parent") and name != "parent":
if hasattr(value, "is_presence_container") and value.is_presence_container:
value.parent = self
elif value.parent is None and value.yang_name in self._children_yang_names:
value.parent = self
super(Vlans, self).__setattr__(name, value)
class Vlan(Entity):
"""
Configured VLANs keyed by id
.. attribute:: vlan_id <key>
references the configured vlan\-id
**type**\: int
**range:** 1..4094
**refers to**\: :py:class:`vlan_id <ydk.models.openconfig.openconfig_vlan.Vlans.Vlan.Config>`
.. attribute:: config
Configuration parameters for VLANs
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_vlan.Vlans.Vlan.Config>`
.. attribute:: members
Enclosing container for list of member interfaces
**type**\: :py:class:`Members <ydk.models.openconfig.openconfig_vlan.Vlans.Vlan.Members>`
.. attribute:: state
State variables for VLANs
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_vlan.Vlans.Vlan.State>`
"""
_prefix = 'oc-vlan'
_revision = '2016-05-26'
def __init__(self):
super(Vlans.Vlan, self).__init__()
self.yang_name = "vlan"
self.yang_parent_name = "vlans"
self.vlan_id = YLeaf(YType.str, "vlan-id")
self.config = Vlans.Vlan.Config()
self.config.parent = self
self._children_name_map["config"] = "config"
self._children_yang_names.add("config")
self.members = Vlans.Vlan.Members()
self.members.parent = self
self._children_name_map["members"] = "members"
self._children_yang_names.add("members")
self.state = Vlans.Vlan.State()
self.state.parent = self
self._children_name_map["state"] = "state"
self._children_yang_names.add("state")
def __setattr__(self, name, value):
self._check_monkey_patching_error(name, value)
with _handle_type_error():
if name in self.__dict__ and isinstance(self.__dict__[name], YList):
raise YPYModelError("Attempt to assign value of '{}' to YList ldata. "
"Please use list append or extend method."
.format(value))
if isinstance(value, Enum.YLeaf):
value = value.name
if name in ("vlan_id") and name in self.__dict__:
if isinstance(value, YLeaf):
self.__dict__[name].set(value.get())
elif isinstance(value, YLeafList):
super(Vlans.Vlan, self).__setattr__(name, value)
else:
self.__dict__[name].set(value)
else:
if hasattr(value, "parent") and name != "parent":
if hasattr(value, "is_presence_container") and value.is_presence_container:
value.parent = self
elif value.parent is None and value.yang_name in self._children_yang_names:
value.parent = self
super(Vlans.Vlan, self).__setattr__(name, value)
class Config(Entity):
"""
Configuration parameters for VLANs
.. attribute:: name
Interface VLAN name
**type**\: str
.. attribute:: status
Admin state of the VLAN
**type**\: :py:class:`Status <ydk.models.openconfig.openconfig_vlan.Vlans.Vlan.Config.Status>`
**default value**\: ACTIVE
.. attribute:: tpid
Optionally set the tag protocol identifier field (TPID) that is accepted on the VLAN
**type**\: :py:class:`Tpid_Types <ydk.models.openconfig.openconfig_vlan_types.Tpid_Types>`
**default value**\: oc-vlan-types:TPID_0x8100
.. attribute:: vlan_id
Interface VLAN id
**type**\: int
**range:** 1..4094
"""
_prefix = 'oc-vlan'
_revision = '2016-05-26'
def __init__(self):
super(Vlans.Vlan.Config, self).__init__()
self.yang_name = "config"
self.yang_parent_name = "vlan"
self.name = YLeaf(YType.str, "name")
self.status = YLeaf(YType.enumeration, "status")
self.tpid = YLeaf(YType.identityref, "tpid")
self.vlan_id = YLeaf(YType.uint16, "vlan-id")
def __setattr__(self, name, value):
self._check_monkey_patching_error(name, value)
with _handle_type_error():
if name in self.__dict__ and isinstance(self.__dict__[name], YList):
raise YPYModelError("Attempt to assign value of '{}' to YList ldata. "
"Please use list append or extend method."
.format(value))
if isinstance(value, Enum.YLeaf):
value = value.name
if name in ("name",
"status",
"tpid",
"vlan_id") and name in self.__dict__:
if isinstance(value, YLeaf):
self.__dict__[name].set(value.get())
elif isinstance(value, YLeafList):
super(Vlans.Vlan.Config, self).__setattr__(name, value)
else:
self.__dict__[name].set(value)
else:
if hasattr(value, "parent") and name != "parent":
if hasattr(value, "is_presence_container") and value.is_presence_container:
value.parent = self
elif value.parent is None and value.yang_name in self._children_yang_names:
value.parent = self
super(Vlans.Vlan.Config, self).__setattr__(name, value)
class Status(Enum):
"""
Status
Admin state of the VLAN
.. data:: ACTIVE = 0
VLAN is active
.. data:: SUSPENDED = 1
VLAN is inactive / suspended
"""
ACTIVE = Enum.YLeaf(0, "ACTIVE")
SUSPENDED = Enum.YLeaf(1, "SUSPENDED")
def has_data(self):
    """True when any of the four config leaves has been assigned a value."""
    leaves = (self.name, self.status, self.tpid, self.vlan_id)
    return any(leaf.is_set for leaf in leaves)
def has_operation(self):
    """True when a filter is set on this node or on any of its leaves."""
    if self.yfilter != YFilter.not_set:
        return True
    leaves = (self.name, self.status, self.tpid, self.vlan_id)
    return any(leaf.yfilter != YFilter.not_set for leaf in leaves)
def get_segment_path(self):
    """Relative YANG path segment of this config node under its vlan entry."""
    return "config"
def get_entity_path(self, ancestor):
path_buffer = ""
if (ancestor is None):
raise YPYModelError("ancestor cannot be None as one of the ancestors is a list")
else:
path_buffer = _get_relative_entity_path(self, ancestor, path_buffer)
leaf_name_data = LeafDataList()
if (self.name.is_set or self.name.yfilter != YFilter.not_set):
leaf_name_data.append(self.name.get_name_leafdata())
if (self.status.is_set or self.status.yfilter != YFilter.not_set):
leaf_name_data.append(self.status.get_name_leafdata())
if (self.tpid.is_set or self.tpid.yfilter != YFilter.not_set):
leaf_name_data.append(self.tpid.get_name_leafdata())
if (self.vlan_id.is_set or self.vlan_id.yfilter != YFilter.not_set):
leaf_name_data.append(self.vlan_id.get_name_leafdata())
entity_path = EntityPath(path_buffer, leaf_name_data)
return entity_path
def get_child_by_name(self, child_yang_name, segment_path):
    """Leaf-only node: resolve via the generic segment lookup.

    The helper already yields None when nothing matches, so its result
    can be returned directly.
    """
    return self._get_child_by_seg_name([child_yang_name, segment_path])
def has_leaf_or_child_of_name(self, name):
    """The config node carries exactly these four leaves and no children."""
    return name in ("name", "status", "tpid", "vlan-id")
def set_value(self, value_path, value, name_space, name_space_prefix):
    """Assign one leaf, addressed by its YANG leaf name, and tag its namespace.

    NOTE(review): the bare assignment (e.g. ``self.name = value``) is routed
    through the class's overridden __setattr__, which stores the value inside
    the existing YLeaf object rather than rebinding the attribute — which is
    why the subsequent ``value_namespace`` attribute writes still work.
    """
    if(value_path == "name"):
        self.name = value
        self.name.value_namespace = name_space
        self.name.value_namespace_prefix = name_space_prefix
    if(value_path == "status"):
        self.status = value
        self.status.value_namespace = name_space
        self.status.value_namespace_prefix = name_space_prefix
    if(value_path == "tpid"):
        self.tpid = value
        self.tpid.value_namespace = name_space
        self.tpid.value_namespace_prefix = name_space_prefix
    if(value_path == "vlan-id"):
        self.vlan_id = value
        self.vlan_id.value_namespace = name_space
        self.vlan_id.value_namespace_prefix = name_space_prefix
class State(Entity):
"""
State variables for VLANs
.. attribute:: name
Interface VLAN name
**type**\: str
.. attribute:: status
Admin state of the VLAN
**type**\: :py:class:`Status <ydk.models.openconfig.openconfig_vlan.Vlans.Vlan.State.Status>`
**default value**\: ACTIVE
.. attribute:: tpid
Optionally set the tag protocol identifier field (TPID) that is accepted on the VLAN
**type**\: :py:class:`Tpid_Types <ydk.models.openconfig.openconfig_vlan_types.Tpid_Types>`
**default value**\: oc-vlan-types:TPID_0x8100
.. attribute:: vlan_id
Interface VLAN id
**type**\: int
**range:** 1..4094
"""
_prefix = 'oc-vlan'
_revision = '2016-05-26'
def __init__(self):
super(Vlans.Vlan.State, self).__init__()
self.yang_name = "state"
self.yang_parent_name = "vlan"
self.name = YLeaf(YType.str, "name")
self.status = YLeaf(YType.enumeration, "status")
self.tpid = YLeaf(YType.identityref, "tpid")
self.vlan_id = YLeaf(YType.uint16, "vlan-id")
def __setattr__(self, name, value):
self._check_monkey_patching_error(name, value)
with _handle_type_error():
if name in self.__dict__ and isinstance(self.__dict__[name], YList):
raise YPYModelError("Attempt to assign value of '{}' to YList ldata. "
"Please use list append or extend method."
.format(value))
if isinstance(value, Enum.YLeaf):
value = value.name
if name in ("name",
"status",
"tpid",
"vlan_id") and name in self.__dict__:
if isinstance(value, YLeaf):
self.__dict__[name].set(value.get())
elif isinstance(value, YLeafList):
super(Vlans.Vlan.State, self).__setattr__(name, value)
else:
self.__dict__[name].set(value)
else:
if hasattr(value, "parent") and name != "parent":
if hasattr(value, "is_presence_container") and value.is_presence_container:
value.parent = self
elif value.parent is None and value.yang_name in self._children_yang_names:
value.parent = self
super(Vlans.Vlan.State, self).__setattr__(name, value)
class Status(Enum):
"""
Status
Admin state of the VLAN
.. data:: ACTIVE = 0
VLAN is active
.. data:: SUSPENDED = 1
VLAN is inactive / suspended
"""
ACTIVE = Enum.YLeaf(0, "ACTIVE")
SUSPENDED = Enum.YLeaf(1, "SUSPENDED")
def has_data(self):
return (
self.name.is_set or
self.status.is_set or
self.tpid.is_set or
self.vlan_id.is_set)
def has_operation(self):
return (
self.yfilter != YFilter.not_set or
self.name.yfilter != YFilter.not_set or
self.status.yfilter != YFilter.not_set or
self.tpid.yfilter != YFilter.not_set or
self.vlan_id.yfilter != YFilter.not_set)
def get_segment_path(self):
path_buffer = ""
path_buffer = "state" + path_buffer
return path_buffer
def get_entity_path(self, ancestor):
path_buffer = ""
if (ancestor is None):
raise YPYModelError("ancestor cannot be None as one of the ancestors is a list")
else:
path_buffer = _get_relative_entity_path(self, ancestor, path_buffer)
leaf_name_data = LeafDataList()
if (self.name.is_set or self.name.yfilter != YFilter.not_set):
leaf_name_data.append(self.name.get_name_leafdata())
if (self.status.is_set or self.status.yfilter != YFilter.not_set):
leaf_name_data.append(self.status.get_name_leafdata())
if (self.tpid.is_set or self.tpid.yfilter != YFilter.not_set):
leaf_name_data.append(self.tpid.get_name_leafdata())
if (self.vlan_id.is_set or self.vlan_id.yfilter != YFilter.not_set):
leaf_name_data.append(self.vlan_id.get_name_leafdata())
entity_path = EntityPath(path_buffer, leaf_name_data)
return entity_path
def get_child_by_name(self, child_yang_name, segment_path):
child = self._get_child_by_seg_name([child_yang_name, segment_path])
if child is not None:
return child
return None
def has_leaf_or_child_of_name(self, name):
if(name == "name" or name == "status" or name == "tpid" or name == "vlan-id"):
return True
return False
def set_value(self, value_path, value, name_space, name_space_prefix):
if(value_path == "name"):
self.name = value
self.name.value_namespace = name_space
self.name.value_namespace_prefix = name_space_prefix
if(value_path == "status"):
self.status = value
self.status.value_namespace = name_space
self.status.value_namespace_prefix = name_space_prefix
if(value_path == "tpid"):
self.tpid = value
self.tpid.value_namespace = name_space
self.tpid.value_namespace_prefix = name_space_prefix
if(value_path == "vlan-id"):
self.vlan_id = value
self.vlan_id.value_namespace = name_space
self.vlan_id.value_namespace_prefix = name_space_prefix
class Members(Entity):
"""
Enclosing container for list of member interfaces
.. attribute:: member
List of references to interfaces / subinterfaces associated with the VLAN
**type**\: list of :py:class:`Member <ydk.models.openconfig.openconfig_vlan.Vlans.Vlan.Members.Member>`
"""
_prefix = 'oc-vlan'
_revision = '2016-05-26'
def __init__(self):
super(Vlans.Vlan.Members, self).__init__()
self.yang_name = "members"
self.yang_parent_name = "vlan"
self.member = YList(self)
def __setattr__(self, name, value):
self._check_monkey_patching_error(name, value)
with _handle_type_error():
if name in self.__dict__ and isinstance(self.__dict__[name], YList):
raise YPYModelError("Attempt to assign value of '{}' to YList ldata. "
"Please use list append or extend method."
.format(value))
if isinstance(value, Enum.YLeaf):
value = value.name
if name in () and name in self.__dict__:
if isinstance(value, YLeaf):
self.__dict__[name].set(value.get())
elif isinstance(value, YLeafList):
super(Vlans.Vlan.Members, self).__setattr__(name, value)
else:
self.__dict__[name].set(value)
else:
if hasattr(value, "parent") and name != "parent":
if hasattr(value, "is_presence_container") and value.is_presence_container:
value.parent = self
elif value.parent is None and value.yang_name in self._children_yang_names:
value.parent = self
super(Vlans.Vlan.Members, self).__setattr__(name, value)
class Member(Entity):
"""
List of references to interfaces / subinterfaces
associated with the VLAN.
.. attribute:: interface_ref
Reference to an interface or subinterface
**type**\: :py:class:`InterfaceRef <ydk.models.openconfig.openconfig_vlan.Vlans.Vlan.Members.Member.InterfaceRef>`
"""
_prefix = 'oc-vlan'
_revision = '2016-05-26'
def __init__(self):
super(Vlans.Vlan.Members.Member, self).__init__()
self.yang_name = "member"
self.yang_parent_name = "members"
self.interface_ref = Vlans.Vlan.Members.Member.InterfaceRef()
self.interface_ref.parent = self
self._children_name_map["interface_ref"] = "interface-ref"
self._children_yang_names.add("interface-ref")
class InterfaceRef(Entity):
"""
Reference to an interface or subinterface
.. attribute:: state
Operational state for interface\-ref
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_vlan.Vlans.Vlan.Members.Member.InterfaceRef.State>`
"""
_prefix = 'oc-vlan'
_revision = '2016-05-26'
def __init__(self):
super(Vlans.Vlan.Members.Member.InterfaceRef, self).__init__()
self.yang_name = "interface-ref"
self.yang_parent_name = "member"
self.state = Vlans.Vlan.Members.Member.InterfaceRef.State()
self.state.parent = self
self._children_name_map["state"] = "state"
self._children_yang_names.add("state")
class State(Entity):
"""
Operational state for interface\-ref
.. attribute:: interface
Reference to a base interface. If a reference to a subinterface is required, this leaf must be specified to indicate the base interface
**type**\: str
**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`
.. attribute:: subinterface
Reference to a subinterface \-\- this requires the base interface to be specified using the interface leaf in this container. If only a reference to a base interface is requuired, this leaf should not be set
**type**\: int
**range:** 0..4294967295
**refers to**\: :py:class:`index <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface>`
"""
_prefix = 'oc-vlan'
_revision = '2016-05-26'
def __init__(self):
super(Vlans.Vlan.Members.Member.InterfaceRef.State, self).__init__()
self.yang_name = "state"
self.yang_parent_name = "interface-ref"
self.interface = YLeaf(YType.str, "interface")
self.subinterface = YLeaf(YType.str, "subinterface")
def __setattr__(self, name, value):
self._check_monkey_patching_error(name, value)
with _handle_type_error():
if name in self.__dict__ and isinstance(self.__dict__[name], YList):
raise YPYModelError("Attempt to assign value of '{}' to YList ldata. "
"Please use list append or extend method."
.format(value))
if isinstance(value, Enum.YLeaf):
value = value.name
if name in ("interface",
"subinterface") and name in self.__dict__:
if isinstance(value, YLeaf):
self.__dict__[name].set(value.get())
elif isinstance(value, YLeafList):
super(Vlans.Vlan.Members.Member.InterfaceRef.State, self).__setattr__(name, value)
else:
self.__dict__[name].set(value)
else:
if hasattr(value, "parent") and name != "parent":
if hasattr(value, "is_presence_container") and value.is_presence_container:
value.parent = self
elif value.parent is None and value.yang_name in self._children_yang_names:
value.parent = self
super(Vlans.Vlan.Members.Member.InterfaceRef.State, self).__setattr__(name, value)
def has_data(self):
return (
self.interface.is_set or
self.subinterface.is_set)
def has_operation(self):
return (
self.yfilter != YFilter.not_set or
self.interface.yfilter != YFilter.not_set or
self.subinterface.yfilter != YFilter.not_set)
def get_segment_path(self):
path_buffer = ""
path_buffer = "state" + path_buffer
return path_buffer
def get_entity_path(self, ancestor):
path_buffer = ""
if (ancestor is None):
raise YPYModelError("ancestor cannot be None as one of the ancestors is a list")
else:
path_buffer = _get_relative_entity_path(self, ancestor, path_buffer)
leaf_name_data = LeafDataList()
if (self.interface.is_set or self.interface.yfilter != YFilter.not_set):
leaf_name_data.append(self.interface.get_name_leafdata())
if (self.subinterface.is_set or self.subinterface.yfilter != YFilter.not_set):
leaf_name_data.append(self.subinterface.get_name_leafdata())
entity_path = EntityPath(path_buffer, leaf_name_data)
return entity_path
def get_child_by_name(self, child_yang_name, segment_path):
child = self._get_child_by_seg_name([child_yang_name, segment_path])
if child is not None:
return child
return None
def has_leaf_or_child_of_name(self, name):
if(name == "interface" or name == "subinterface"):
return True
return False
def set_value(self, value_path, value, name_space, name_space_prefix):
if(value_path == "interface"):
self.interface = value
self.interface.value_namespace = name_space
self.interface.value_namespace_prefix = name_space_prefix
if(value_path == "subinterface"):
self.subinterface = value
self.subinterface.value_namespace = name_space
self.subinterface.value_namespace_prefix = name_space_prefix
def has_data(self):
return (self.state is not None and self.state.has_data())
def has_operation(self):
return (
self.yfilter != YFilter.not_set or
(self.state is not None and self.state.has_operation()))
def get_segment_path(self):
path_buffer = ""
path_buffer = "interface-ref" + path_buffer
return path_buffer
def get_entity_path(self, ancestor):
path_buffer = ""
if (ancestor is None):
raise YPYModelError("ancestor cannot be None as one of the ancestors is a list")
else:
path_buffer = _get_relative_entity_path(self, ancestor, path_buffer)
leaf_name_data = LeafDataList()
entity_path = EntityPath(path_buffer, leaf_name_data)
return entity_path
def get_child_by_name(self, child_yang_name, segment_path):
child = self._get_child_by_seg_name([child_yang_name, segment_path])
if child is not None:
return child
if (child_yang_name == "state"):
if (self.state is None):
self.state = Vlans.Vlan.Members.Member.InterfaceRef.State()
self.state.parent = self
self._children_name_map["state"] = "state"
return self.state
return None
def has_leaf_or_child_of_name(self, name):
if(name == "state"):
return True
return False
def set_value(self, value_path, value, name_space, name_space_prefix):
pass
def has_data(self):
return (self.interface_ref is not None and self.interface_ref.has_data())
def has_operation(self):
return (
self.yfilter != YFilter.not_set or
(self.interface_ref is not None and self.interface_ref.has_operation()))
def get_segment_path(self):
path_buffer = ""
path_buffer = "member" + path_buffer
return path_buffer
def get_entity_path(self, ancestor):
path_buffer = ""
if (ancestor is None):
raise YPYModelError("ancestor cannot be None as one of the ancestors is a list")
else:
path_buffer = _get_relative_entity_path(self, ancestor, path_buffer)
leaf_name_data = LeafDataList()
entity_path = EntityPath(path_buffer, leaf_name_data)
return entity_path
def get_child_by_name(self, child_yang_name, segment_path):
child = self._get_child_by_seg_name([child_yang_name, segment_path])
if child is not None:
return child
if (child_yang_name == "interface-ref"):
if (self.interface_ref is None):
self.interface_ref = Vlans.Vlan.Members.Member.InterfaceRef()
self.interface_ref.parent = self
self._children_name_map["interface_ref"] = "interface-ref"
return self.interface_ref
return None
def has_leaf_or_child_of_name(self, name):
if(name == "interface-ref"):
return True
return False
def set_value(self, value_path, value, name_space, name_space_prefix):
pass
def has_data(self):
for c in self.member:
if (c.has_data()):
return True
return False
def has_operation(self):
for c in self.member:
if (c.has_operation()):
return True
return self.yfilter != YFilter.not_set
def get_segment_path(self):
path_buffer = ""
path_buffer = "members" + path_buffer
return path_buffer
def get_entity_path(self, ancestor):
path_buffer = ""
if (ancestor is None):
raise YPYModelError("ancestor cannot be None as one of the ancestors is a list")
else:
path_buffer = _get_relative_entity_path(self, ancestor, path_buffer)
leaf_name_data = LeafDataList()
entity_path = EntityPath(path_buffer, leaf_name_data)
return entity_path
def get_child_by_name(self, child_yang_name, segment_path):
child = self._get_child_by_seg_name([child_yang_name, segment_path])
if child is not None:
return child
if (child_yang_name == "member"):
for c in self.member:
segment = c.get_segment_path()
if (segment_path == segment):
return c
c = Vlans.Vlan.Members.Member()
c.parent = self
local_reference_key = "ydk::seg::%s" % segment_path
self._local_refs[local_reference_key] = c
self.member.append(c)
return c
return None
def has_leaf_or_child_of_name(self, name):
if(name == "member"):
return True
return False
def set_value(self, value_path, value, name_space, name_space_prefix):
pass
def has_data(self):
return (
self.vlan_id.is_set or
(self.config is not None and self.config.has_data()) or
(self.members is not None and self.members.has_data()) or
(self.state is not None and self.state.has_data()))
def has_operation(self):
return (
self.yfilter != YFilter.not_set or
self.vlan_id.yfilter != YFilter.not_set or
(self.config is not None and self.config.has_operation()) or
(self.members is not None and self.members.has_operation()) or
(self.state is not None and self.state.has_operation()))
def get_segment_path(self):
path_buffer = ""
path_buffer = "vlan" + "[vlan-id='" + self.vlan_id.get() + "']" + path_buffer
return path_buffer
def get_entity_path(self, ancestor):
path_buffer = ""
if (ancestor is None):
path_buffer = "openconfig-vlan:vlans/%s" % self.get_segment_path()
else:
path_buffer = _get_relative_entity_path(self, ancestor, path_buffer)
leaf_name_data = LeafDataList()
if (self.vlan_id.is_set or self.vlan_id.yfilter != YFilter.not_set):
leaf_name_data.append(self.vlan_id.get_name_leafdata())
entity_path = EntityPath(path_buffer, leaf_name_data)
return entity_path
def get_child_by_name(self, child_yang_name, segment_path):
child = self._get_child_by_seg_name([child_yang_name, segment_path])
if child is not None:
return child
if (child_yang_name == "config"):
if (self.config is None):
self.config = Vlans.Vlan.Config()
self.config.parent = self
self._children_name_map["config"] = "config"
return self.config
if (child_yang_name == "members"):
if (self.members is None):
self.members = Vlans.Vlan.Members()
self.members.parent = self
self._children_name_map["members"] = "members"
return self.members
if (child_yang_name == "state"):
if (self.state is None):
self.state = Vlans.Vlan.State()
self.state.parent = self
self._children_name_map["state"] = "state"
return self.state
return None
def has_leaf_or_child_of_name(self, name):
if(name == "config" or name == "members" or name == "state" or name == "vlan-id"):
return True
return False
def set_value(self, value_path, value, name_space, name_space_prefix):
if(value_path == "vlan-id"):
self.vlan_id = value
self.vlan_id.value_namespace = name_space
self.vlan_id.value_namespace_prefix = name_space_prefix
def has_data(self):
    """True when at least one configured vlan entry carries data."""
    return any(entry.has_data() for entry in self.vlan)
def has_operation(self):
    """True when any vlan entry, or this node itself, has a filter set."""
    if any(entry.has_operation() for entry in self.vlan):
        return True
    return self.yfilter != YFilter.not_set
def get_segment_path(self):
    """Absolute (module-qualified) segment for the top-level container."""
    return "openconfig-vlan:vlans"
def get_entity_path(self, ancestor):
    """Build the entity path; a top-level node rejects any ancestor."""
    if ancestor is not None:
        raise YPYModelError("ancestor has to be None for top-level node")
    # Top-level container has no leaves, so the leaf-data list is empty.
    return EntityPath(self.get_segment_path(), LeafDataList())
def get_child_by_name(self, child_yang_name, segment_path):
    """Return the child entity addressed by *segment_path*.

    Resolution order: previously cached local references first; then an
    existing "vlan" list entry whose segment path matches; otherwise a new
    Vlan entry is created on demand, cached in _local_refs and appended to
    the list. Returns None for unknown child names.
    """
    child = self._get_child_by_seg_name([child_yang_name, segment_path])
    if child is not None:
        return child
    if (child_yang_name == "vlan"):
        for c in self.vlan:
            segment = c.get_segment_path()
            if (segment_path == segment):
                return c
        # No match: materialize a new list entry and remember it.
        c = Vlans.Vlan()
        c.parent = self
        local_reference_key = "ydk::seg::%s" % segment_path
        self._local_refs[local_reference_key] = c
        self.vlan.append(c)
        return c
    return None
def has_leaf_or_child_of_name(self, name):
    """Only the "vlan" list exists under this container; no direct leaves."""
    return name == "vlan"
def set_value(self, value_path, value, name_space, name_space_prefix):
    # The top-level "vlans" container has no direct leaves to assign.
    pass
def clone_ptr(self):
    """Create a fresh top-level Vlans entity, remember it, and return it."""
    cloned = Vlans()
    self._top_entity = cloned
    return cloned
| [
"[email protected]"
] | |
779e7b1fc2bfe837f10a8070b3600f71ae8cdf3a | ece7ba486d29d4bc3e87c2046db2c31140e2d86a | /suitcase/mongo_normalized/tests/tests.py | 75f4046965a77899a78b88195844aeadf0dfc188 | [] | no_license | ke-zhang-rd/suitcase-mongo | 31b97bb13b9e6089248f888a6c33824b835de141 | c938bae589ab2fba301814c846c5d5339eb90fb8 | refs/heads/master | 2020-05-31T10:29:15.458932 | 2019-10-18T17:33:03 | 2019-10-18T17:33:03 | 190,241,607 | 0 | 0 | null | 2019-06-04T16:38:12 | 2019-06-04T16:38:11 | null | UTF-8 | Python | false | false | 451 | py | # Tests should generate (and then clean up) any files they need for testing. No
# binary files should be included in the repository.
from suitcase.mongo_normalized import Serializer
def test_export(db_factory, example_data):
documents = example_data()
metadatastore_db = db_factory()
asset_registry_db = db_factory()
serializer = Serializer(metadatastore_db, asset_registry_db)
for item in documents:
serializer(*item)
| [
"[email protected]"
] | |
39ac82b5900d8bff567825bc91b455a0be5074b1 | 4a7804ee05485c345b4e3c39a0c96ed4012542ac | /system/base/less/actions.py | fd506df396c789af3c5b6f73302212caf721a16d | [] | no_license | Erick-Pardus/Pardus | 1fef143c117c62a40e3779c3d09f5fd49b5a6f5c | 2693e89d53304a216a8822978e13f646dce9b1d3 | refs/heads/master | 2020-12-31T02:49:33.189799 | 2013-03-17T06:29:33 | 2013-03-17T06:29:33 | 17,247,989 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 741 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2005-2009 TUBITAK/UEKAE
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.actionsapi import get
def setup():
    """Regenerate the autotools build system, then configure the source tree."""
    autotools.autoreconf("-vfi")
    autotools.configure()
def build():
    """Compile the package with the default make targets."""
    autotools.make()
def install():
    """Install into the package image directory and ship the documentation."""
    # DESTDIR redirects `make install` into the sandbox image dir.
    autotools.rawInstall('DESTDIR="%s"' % get.installDIR())
    #pisitools.dobin("less")
    #pisitools.dobin("lessecho")
    #pisitools.dobin("lesskey")
    #pisitools.newman("lesskey.nro", "lesskey.1")
    #pisitools.newman("less.nro", "less.1")
    pisitools.dodoc("NEWS", "README", "COPYING")
| [
"[email protected]"
] | |
eda2d7a7d548e568bc5fb77caddeb16bfc3b87a0 | 861c248aab85784542fab84eeccedda6c90682d9 | /msgtracker/apps/collector.py | 57d9013ce26b082eb333ef71a105496cc2632ede | [
"MIT"
] | permissive | daleysoftware/msg-tracker | c91cd67e7466c04574c2ed5256a2a0f931dd8647 | 16edb9d555795d0eec625dd954e14f914cbbbe2b | refs/heads/master | 2022-05-16T02:58:44.083469 | 2017-03-01T23:43:24 | 2017-03-01T23:43:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,558 | py | import sched
import time
import sys
import logging
import msgtracker
import datetime
import signal
scheduler = sched.scheduler(time.time, time.sleep)
def _collect_and_log_forever(slack_client):
    """Run one collection round, then reschedule itself indefinitely.

    Records every currently-active Slack user in the backend under a
    single sample timestamp. A round that fails with IOError is retried
    after one minute instead of the normal query interval.
    """
    delay_minutes = msgtracker.constants.QUERY_INTERVAL_MINUTES
    try:
        logging.info("Collect and log sequence queued.")
        sample_time = datetime.datetime.now()
        logging.debug("Sample time for this collection round: %s" % sample_time.strftime('%s'))
        for member in slack_client.get_active_users():
            msgtracker.backend.log_active(member, sample_time)
    except IOError as err:
        delay_minutes = 1
        logging.error("IO error during collection round, retry soon. Error: %s" % err)
    logging.info("Wait %s minutes." % delay_minutes)
    # Re-arm the module scheduler so collection keeps running forever.
    scheduler.enter(delay_minutes * 60, 1, _collect_and_log_forever,
                    argument=(slack_client,))
def signal_handler(signum, frame):
    """Abort the process when a signal arrives (installed for SIGINT)."""
    print()  # Cosmetics: newline so ^C doesn't mangle the log line.
    logging.error("Received signal. Abort.")
    sys.exit(1)
def main(slack_client):
    """
    Main program. Kick off scheduler and run forever.
    """
    # Install the SIGINT handler before blocking in the scheduler so
    # Ctrl-C exits cleanly.
    signal.signal(signal.SIGINT, signal_handler)
    # Delay 0 fires the first collection round immediately; every round
    # re-queues itself, so run() never returns.
    scheduler.enter(0, 1, _collect_and_log_forever, argument=(slack_client,))
    scheduler.run()
if __name__ == '__main__':
    # Configure logging, then hand a live Slack endpoint to the
    # scheduler loop; main() blocks forever.
    msgtracker.helper.logging.init()
    logging.info("Starting collector service.")
    main(msgtracker.endpoints.slack.Slack())
| [
"[email protected]"
] | |
977aa0a76af026f61c509844bb37c9a7e0e2603a | eb7c15f59f0863b457b272849930dce4ef92e58c | /znc/run | 09e03a3c11edded93c1dd153409c21e45d5db281 | [] | no_license | dozymoe/runit-init-scripts | 40492bc11b9a7f5f974088e7b5e870d97f54354a | ddb8915c6f2da8a5c2acdb5e09f33bc6c027ccdb | refs/heads/master | 2021-01-22T07:06:29.736090 | 2014-06-19T01:32:08 | 2014-06-19T01:32:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 521 | #!/usr/bin/env python
import logging
import os
import sys
from time import sleep
# import external library
sys.path.append('/var/service')
from runit_helper import (
MAXIMUM_CRASHES_DELAY,
check_crash_quota,
check_dependencies,
get_logger,
run,
)
service_name = 'znc'
# Per-service logger shared with the runit helper utilities.
log = get_logger(service_name, logging.INFO)
# NOTE(review): presumably blocks/exits until declared dependencies are up --
# semantics live in runit_helper; confirm there.
check_dependencies(service_name, log)
if check_crash_quota(service_name):
    # Crash-looping: back off for the maximum delay, then let runit restart us.
    sleep(MAXIMUM_CRASHES_DELAY)
    exit(0)
log.info('starting..')
# Launch znc in the foreground so the runit supervisor can track the process.
run('/usr/bin/znc', ['--foreground'])
| [
"[email protected]"
] | ||
4b64a051e30b954139e58857c0e08c141725d3be | 8f1d6f17d3bdad867518b7b0a164adfe6aeeed95 | /recognition/vpl/backbones/iresnet.py | c6d3b9c240c24687d432197f976ee01fbf423216 | [
"MIT",
"LicenseRef-scancode-proprietary-license"
] | permissive | xwyangjshb/insightface | 2c7f030a5d1f5a24b18967bd0d775ee33933d37f | ae233babaf7614ef4ef28dac0171205835d78d64 | refs/heads/master | 2022-09-29T07:49:22.944700 | 2022-09-22T11:36:12 | 2022-09-22T11:36:12 | 221,020,460 | 1 | 0 | MIT | 2019-11-11T16:16:56 | 2019-11-11T16:16:55 | null | UTF-8 | Python | false | false | 7,149 | py | import torch
from torch import nn
__all__ = ['iresnet18', 'iresnet34', 'iresnet50', 'iresnet100', 'iresnet200']
def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
    """Bias-free 3x3 convolution; padding equals dilation so a stride-1
    call preserves the spatial size."""
    conv_kwargs = dict(
        kernel_size=3,
        stride=stride,
        padding=dilation,
        groups=groups,
        dilation=dilation,
        bias=False,
    )
    return nn.Conv2d(in_planes, out_planes, **conv_kwargs)
def conv1x1(in_planes, out_planes, stride=1):
    """Bias-free 1x1 (pointwise) convolution."""
    pointwise = nn.Conv2d(in_planes, out_planes, kernel_size=1,
                          stride=stride, bias=False)
    return pointwise
class IBasicBlock(nn.Module):
    """Pre-activation residual block: BN -> 3x3 conv -> BN -> PReLU ->
    strided 3x3 conv -> BN, added to the (optionally downsampled) input.

    Only groups=1 / base_width=64 / dilation=1 are supported.
    """
    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None,
                 groups=1, base_width=64, dilation=1):
        super(IBasicBlock, self).__init__()
        if groups != 1 or base_width != 64:
            raise ValueError('BasicBlock only supports groups=1 and base_width=64')
        if dilation > 1:
            raise NotImplementedError("Dilation > 1 not supported in BasicBlock")
        # The conv3x3 helper is inlined here: 3x3 kernel, padding 1, no bias.
        self.bn1 = nn.BatchNorm2d(inplanes, eps=1e-05,)
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3, stride=1,
                               padding=1, groups=1, bias=False, dilation=1)
        self.bn2 = nn.BatchNorm2d(planes, eps=1e-05,)
        self.prelu = nn.PReLU(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                               padding=1, groups=1, bias=False, dilation=1)
        self.bn3 = nn.BatchNorm2d(planes, eps=1e-05,)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Skip connection, projected when channel count / stride changes.
        shortcut = x if self.downsample is None else self.downsample(x)
        out = self.conv1(self.bn1(x))
        out = self.prelu(self.bn2(out))
        out = self.bn3(self.conv2(out))
        return out + shortcut
class IResNet(nn.Module):
    """Improved ResNet trunk producing a fixed-length embedding.

    Structure: 3x3 stem (stride 1) -> four stages of IBasicBlocks (each stage
    entered with stride 2) -> BN -> dropout -> fully-connected -> BN1d
    "features" layer whose scale is frozen at 1.
    """
    # Spatial size of the last feature map before the FC layer is assumed to
    # be 7x7 (consistent with a 112x112 input and four stride-2 stages --
    # TODO confirm expected input resolution).
    fc_scale = 7 * 7
    def __init__(self,
                 block, layers, dropout=0, num_features=512, zero_init_residual=False,
                 groups=1, width_per_group=64, replace_stride_with_dilation=None, fp16=False):
        """Args:
            block: residual block class (IBasicBlock).
            layers: number of blocks per stage (length 4).
            dropout: dropout probability before the FC layer.
            num_features: embedding dimensionality.
            zero_init_residual: zero one BN weight per block (see NOTE below).
            replace_stride_with_dilation: per-stage flags trading stride for
                dilation; must be None or length 3.
            fp16: run the convolutional trunk under CUDA autocast.
        """
        super(IResNet, self).__init__()
        self.fp16 = fp16
        self.inplanes = 64
        self.dilation = 1
        if replace_stride_with_dilation is None:
            replace_stride_with_dilation = [False, False, False]
        if len(replace_stride_with_dilation) != 3:
            raise ValueError("replace_stride_with_dilation should be None "
                             "or a 3-element tuple, got {}".format(replace_stride_with_dilation))
        self.groups = groups
        self.base_width = width_per_group
        # Stem keeps full resolution (stride 1); downsampling happens in the stages.
        self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(self.inplanes, eps=1e-05)
        self.prelu = nn.PReLU(self.inplanes)
        self.layer1 = self._make_layer(block, 64, layers[0], stride=2)
        self.layer2 = self._make_layer(block,
                                       128,
                                       layers[1],
                                       stride=2,
                                       dilate=replace_stride_with_dilation[0])
        self.layer3 = self._make_layer(block,
                                       256,
                                       layers[2],
                                       stride=2,
                                       dilate=replace_stride_with_dilation[1])
        self.layer4 = self._make_layer(block,
                                       512,
                                       layers[3],
                                       stride=2,
                                       dilate=replace_stride_with_dilation[2])
        self.bn2 = nn.BatchNorm2d(512 * block.expansion, eps=1e-05,)
        self.dropout = nn.Dropout(p=dropout, inplace=True)
        self.fc = nn.Linear(512 * block.expansion * self.fc_scale, num_features)
        # Final per-feature BN; its scale is pinned to 1 and frozen.
        self.features = nn.BatchNorm1d(num_features, eps=1e-05)
        nn.init.constant_(self.features.weight, 1.0)
        self.features.weight.requires_grad = False
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.normal_(m.weight, 0, 0.1)
            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
        if zero_init_residual:
            # NOTE(review): this zeroes bn2 (the middle BN of each block),
            # not the block's final bn3; torchvision's analogous option zeroes
            # the last BN -- confirm this is intentional upstream.
            for m in self.modules():
                if isinstance(m, IBasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)

    def _make_layer(self, block, planes, blocks, stride=1, dilate=False):
        """Build one stage of ``blocks`` residual blocks; only the first block
        may stride/project, subsequent blocks keep the shape."""
        downsample = None
        previous_dilation = self.dilation
        if dilate:
            # Trade the stage's stride for dilation (keeps resolution).
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.inplanes != planes * block.expansion:
            # Projection shortcut to match shape of the residual branch.
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                nn.BatchNorm2d(planes * block.expansion, eps=1e-05, ),
            )
        layers = []
        layers.append(
            block(self.inplanes, planes, stride, downsample, self.groups,
                  self.base_width, previous_dilation))
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(
                block(self.inplanes,
                      planes,
                      groups=self.groups,
                      base_width=self.base_width,
                      dilation=self.dilation))
        return nn.Sequential(*layers)

    def forward(self, x):
        # Convolutional trunk may run in mixed precision; the FC head always
        # runs in fp32 (hence the explicit .float() cast when fp16 is on).
        with torch.cuda.amp.autocast(self.fp16):
            x = self.conv1(x)
            x = self.bn1(x)
            x = self.prelu(x)
            x = self.layer1(x)
            x = self.layer2(x)
            x = self.layer3(x)
            x = self.layer4(x)
            x = self.bn2(x)
            x = torch.flatten(x, 1)
            x = self.dropout(x)
        x = self.fc(x.float() if self.fp16 else x)
        x = self.features(x)
        return x
def _iresnet(arch, block, layers, pretrained, progress, **kwargs):
model = IResNet(block, layers, **kwargs)
if pretrained:
raise ValueError()
return model
def iresnet18(pretrained=False, progress=True, **kwargs):
    """IResNet-18: four stages of [2, 2, 2, 2] basic blocks."""
    stage_blocks = [2, 2, 2, 2]
    return _iresnet('iresnet18', IBasicBlock, stage_blocks, pretrained,
                    progress, **kwargs)
def iresnet34(pretrained=False, progress=True, **kwargs):
    """IResNet-34: four stages of [3, 4, 6, 3] basic blocks."""
    stage_blocks = [3, 4, 6, 3]
    return _iresnet('iresnet34', IBasicBlock, stage_blocks, pretrained,
                    progress, **kwargs)
def iresnet50(pretrained=False, progress=True, **kwargs):
    """IResNet-50: four stages of [3, 4, 14, 3] basic blocks."""
    stage_blocks = [3, 4, 14, 3]
    return _iresnet('iresnet50', IBasicBlock, stage_blocks, pretrained,
                    progress, **kwargs)
def iresnet100(pretrained=False, progress=True, **kwargs):
    """IResNet-100: four stages of [3, 13, 30, 3] basic blocks."""
    stage_blocks = [3, 13, 30, 3]
    return _iresnet('iresnet100', IBasicBlock, stage_blocks, pretrained,
                    progress, **kwargs)
def iresnet200(pretrained=False, progress=True, **kwargs):
    """IResNet-200: four stages of [6, 26, 60, 6] basic blocks."""
    stage_blocks = [6, 26, 60, 6]
    return _iresnet('iresnet200', IBasicBlock, stage_blocks, pretrained,
                    progress, **kwargs)
| [
"[email protected]"
] | |
b65b8f7c48e21d63843b88ce2832a2d666bf33d7 | 32f1d0e9c2fbce7f4682b9f79cae5f3df0480de0 | /brevets/flask_brevets.py | ff59123f5a991747db42de10588f90ef1a270ae0 | [
"Artistic-2.0"
] | permissive | UO-CIS-322/proj4-brevets | b0546b3e47db78c74b4c35b52c5527c811eb8ad0 | a1600206886d324eaa3975f561ae6c7fff601b82 | refs/heads/master | 2021-01-21T21:32:21.088892 | 2017-10-13T21:29:38 | 2017-10-13T21:29:38 | 43,849,637 | 0 | 75 | null | 2017-10-22T04:51:19 | 2015-10-07T23:01:01 | Python | UTF-8 | Python | false | false | 1,854 | py | """
Replacement for RUSA ACP brevet time calculator
(see https://rusa.org/octime_acp.html)
"""
import flask
from flask import request
import arrow # Replacement for datetime, based on moment.js
import acp_times # Brevet time calculations
import config
import logging
###
# Globals
###
app = flask.Flask(__name__)
# Project configuration object (port, debug flag, secret key, ...).
CONFIG = config.configuration()
app.secret_key = CONFIG.SECRET_KEY
###
# Pages
###
@app.route("/")
@app.route("/index")
def index():
    """Render the brevet calculator page at the site root."""
    app.logger.debug("Main page entry")
    return flask.render_template('calc.html')
@app.errorhandler(404)
def page_not_found(error):
    """Render the custom 404 page, remembering where to send the user back."""
    app.logger.debug("Page not found")
    flask.session['linkback'] = flask.url_for("index")
    return flask.render_template('404.html'), 404
###############
#
# AJAX request handlers
# These return JSON, rather than rendering pages.
#
###############
@app.route("/_calc_times")
def _calc_times():
    """
    Calculates open/close times from miles, using rules
    described at https://rusa.org/octime_alg.html.
    Expects one URL-encoded argument, the number of miles.
    Returns JSON: {"result": {"open": ..., "close": ...}}.
    """
    app.logger.debug("Got a JSON request")
    km = request.args.get('km', 999, type=float)
    app.logger.debug("km={}".format(km))
    app.logger.debug("request.args: {}".format(request.args))
    # FIXME: These probably aren't the right open and close times
    # and brevets may be longer than 200km
    # Bug fix: isoformat is a method -- the original passed the bound-method
    # object itself instead of the ISO-8601 string it returns.  Compute the
    # start time once so both calls share the same instant.
    start_time = arrow.now().isoformat()
    open_time = acp_times.open_time(km, 200, start_time)
    close_time = acp_times.close_time(km, 200, start_time)
    result = {"open": open_time, "close": close_time}
    return flask.jsonify(result=result)
#############
app.debug = CONFIG.DEBUG
if app.debug:
    app.logger.setLevel(logging.DEBUG)
if __name__ == "__main__":
    # host 0.0.0.0 exposes the dev server on all interfaces.
    print("Opening for global access on port {}".format(CONFIG.PORT))
    app.run(port=CONFIG.PORT, host="0.0.0.0")
| [
"[email protected]"
] | |
859f53a675da269d458e7153e908f2527223ac15 | bf534da18426b49dbee0a0b1870f5f3a85922855 | /ex023.py | 81005cc6e332246276e30c09bd341672794200b7 | [] | no_license | kcpedrosa/Python-exercises | 0d20a72e7e68d9fc9714e3aabf4850fdbeb7d1f8 | ae35dfad869ceb3aac186fce5161cef8a77a7579 | refs/heads/master | 2021-05-20T08:46:29.318242 | 2020-04-01T15:44:36 | 2020-04-01T15:44:36 | 252,205,326 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 315 | py | num = int(input('Digite um numero qualquer: '))
u = num // 1 % 10
d = num // 10 % 10
c = num // 100 % 10
m = num // 1000 % 10
print('Analisando o numero {}'.format(num))
print('A unidade vale {}'.format(u))
print('A dezena vale {}'.format(d))
print('A centena vale {}'.format(c))
print('A milhar vale {}'.format(m)) | [
"[email protected]"
] | |
994c0795da16cdc04ade8acbce51229b95fa4e8e | 5527d3854ad0840fb4a0a9893447535cd5e6ad0f | /python/ThirteenTeV/QstarToQW_M_1200_TuneCUETP8M1_13TeV_pythia8_cfi.py | 58d9d33c62bab8fd0ee915374feb779697103556 | [] | no_license | danbarto/genproductionsSummer16 | ecf2309c1627b4db3e4a1b8785ca612d9a59426f | 655ef31aa5f05d0117aeef82d107f07a1fd5d822 | refs/heads/master | 2020-03-26T23:12:37.115369 | 2018-08-21T14:23:30 | 2018-08-21T14:23:30 | 145,520,233 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,118 | py | import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
comEnergy = cms.double(13000.0),
crossSection = cms.untracked.double(65.84),
filterEfficiency = cms.untracked.double(1),
maxEventsToPrint = cms.untracked.int32(0),
pythiaHepMCVerbosity = cms.untracked.bool(False),
pythiaPylistVerbosity = cms.untracked.int32(1),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CUEP8M1SettingsBlock,
processParameters = cms.vstring(
'ExcitedFermion:dg2dStar = on',
'ExcitedFermion:ug2uStar = on',
'ExcitedFermion:Lambda = 1200',
'4000001:m0 = 1200',
'4000001:onMode = off',
'4000001:onIfMatch = 2 24',
'4000002:m0 = 1200',
'4000002:onMode = off',
'4000002:onIfMatch = 1 24',
),
parameterSets = cms.vstring('pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters')
)
)
ProductionFilterSequence = cms.Sequence(generator)
| [
"[email protected]"
] | |
ddc32b1926560d046349ee35ff5707643abd8afe | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /140_gui/pyqt_pyside/_exercises/_templates/temp/Mastering GUI Programming with Python/Chapter 3 Handling Events with Signals and Slots/signal_slots_demo.py | f79d2febefd50d50434b21a86eb7d099cee6be09 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 1,488 | py | # ______ ___
# ____ ? ______ ?W.. __ qtw
# ____ ? ______ ?C.. __ qtc
#
#
# c_ MainWindow ?.?W..
#
# ___ -
# s_. -
# sL.. ?.?VBL..
#
# # connecting a signal to a slot
# quitbutton _ ?.?PB.. Quit
# ?.c__.c.. cl..
# la__ .aW.. ?
#
# # connecting a signal with data to a slot that receives data
# entry1 _ ?.?LE..
# entry2 _ ?.?LE..
# la__ .aW.. ?
# la__ .aW.. ?
# _1.tC...c.. _2.sT.
#
# # connecting a signal to a python callable
# _2.tC...c.. pr..
#
# # Connecting a signal to another signal
# _1.eF__.c.. l___ print editing finished
# _2.rP__.c.. _1.eF__
#
# # This call will fail, because the signals have different argument types
# #self.entry1.textChanged.connect(self.quitbutton.clicked)
#
# # This won't work, because of signal doesn't send enough args
# badbutton _ ?.?PB.. Bad
# la__ .aW.. ?
# ?.c__.c.. n_a..
#
# # This will work, even though the signal sends extra args
# goodbutton _ ?.?PB.. Good
# la__ .aW.. ?
# ?.c__.c.. n_a..
#
#
# s..
#
# ___ needs_args arg1, arg2, arg3
# p..
#
# ___ no_args
# print('I need no arguments')
#
# __ ______ __ ______
# app _ ?.?A.. ___.a..
# # it's required to save a reference to MainWindow.
# # if it goes out of scope, it will be destroyed.
# mw _ ?
# ___.e.. ?.e..
| [
"[email protected]"
] | |
c3eb31bfb07ff76ae317c2d91ec0b1541e01c7c7 | 687fed3e95103b20b804a78659ea79e7918b6aa6 | /maec/bundle/capability.py | 481691ff26e4dd189d834fbd4d0658cf1b75d172 | [
"BSD-3-Clause"
] | permissive | geliefan/python-maec | 02886af1dd3fc07bd89a5323a81920e126a960b4 | dd539b1214f5cf1f445cd5989ce3f93e4fb3b2a8 | refs/heads/master | 2021-01-18T13:04:26.768906 | 2014-07-07T17:16:56 | 2014-07-07T17:16:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,716 | py | #MAEC Capability Class
#Copyright (c) 2014, The MITRE Corporation
#All rights reserved
#Compatible with MAEC v4.1
#Last updated 02/18/2014
import maec
import maec.bindings.maec_bundle as bundle_binding
from maec.bundle.behavior_reference import BehaviorReference
from cybox.common import VocabString, String
class CapabilityObjectiveReference(maec.Entity):
    """Reference (by id) to a Capability Objective defined elsewhere in the
    MAEC v4.1 Bundle.  Serializes to/from the generateDS binding object and a
    plain dict."""
    _namespace = maec.bundle._namespace
    def __init__(self):
        super(CapabilityObjectiveReference, self).__init__()
        # id of the referenced objective (string), or None.
        self.objective_idref = None
    def to_obj(self):
        """Return the generateDS binding object for this reference."""
        capability_objective_reference_obj = bundle_binding.CapabilityObjectiveReferenceType()
        if self.objective_idref is not None: capability_objective_reference_obj.set_objective_idref(self.objective_idref)
        return capability_objective_reference_obj
    def to_dict(self):
        """Return a plain-dict representation of this reference."""
        capability_objective_reference_dict = {}
        if self.objective_idref is not None: capability_objective_reference_dict['objective_idref'] = self.objective_idref
        return capability_objective_reference_dict
    @staticmethod
    def from_obj(capability_objective_reference_obj):
        """Build an instance from a binding object; None input yields None."""
        if not capability_objective_reference_obj:
            return None
        capability_objective_reference_ = CapabilityObjectiveReference()
        capability_objective_reference_.objective_idref = capability_objective_reference_obj.get_objective_idref()
        return capability_objective_reference_
    @staticmethod
    def from_dict(capability_objective_reference_dict):
        """Build an instance from a dict; None/empty input yields None.
        NOTE(review): raises KeyError when 'objective_idref' is absent."""
        if not capability_objective_reference_dict:
            return None
        capability_objective_reference_ = CapabilityObjectiveReference()
        capability_objective_reference_.objective_idref = capability_objective_reference_dict['objective_idref']
        return capability_objective_reference_
class CapabilityReference(maec.Entity):
    """Reference (by id) to a Capability defined elsewhere in the MAEC v4.1
    Bundle.  Serializes to/from the generateDS binding object and a dict."""
    _namespace = maec.bundle._namespace
    def __init__(self):
        super(CapabilityReference, self).__init__()
        # id of the referenced capability (string), or None.
        self.capability_idref = None
    def to_obj(self):
        """Return the generateDS binding object for this reference."""
        capability_reference_obj = bundle_binding.CapabilityReferenceType()
        if self.capability_idref is not None: capability_reference_obj.set_capability_idref(self.capability_idref)
        return capability_reference_obj
    def to_dict(self):
        """Return a plain-dict representation of this reference."""
        capability_reference_dict = {}
        if self.capability_idref is not None: capability_reference_dict['capability_idref'] = self.capability_idref
        return capability_reference_dict
    @staticmethod
    def from_obj(capability_reference_obj):
        """Build an instance from a binding object; None input yields None."""
        if not capability_reference_obj:
            return None
        capability_reference_ = CapabilityReference()
        capability_reference_.capability_idref = capability_reference_obj.get_capability_idref()
        return capability_reference_
    @staticmethod
    def from_dict(capability_reference_dict):
        """Build an instance from a dict; None/empty input yields None.
        NOTE(review): raises KeyError when 'capability_idref' is absent."""
        if not capability_reference_dict:
            return None
        capability_reference_ = CapabilityReference()
        capability_reference_.capability_idref = capability_reference_dict['capability_idref']
        return capability_reference_
class CapabilityObjectiveRelationship(maec.Entity):
    """Typed relationship from a Capability Objective to other Objectives
    (held as CapabilityObjectiveReference instances).  Serializes to/from the
    generateDS binding object and a plain dict."""
    _namespace = maec.bundle._namespace

    def __init__(self):
        super(CapabilityObjectiveRelationship, self).__init__()
        # Controlled-vocabulary string naming the relationship type.
        self.relationship_type = None
        # List of CapabilityObjectiveReference instances.
        self.objective_reference = []

    def to_obj(self):
        """Return the generateDS binding object for this relationship."""
        capability_obj_rel_obj = bundle_binding.CapabilityObjectiveRelationshipType()
        if self.relationship_type is not None: capability_obj_rel_obj.set_Relationship_Type(self.relationship_type.to_obj())
        if self.objective_reference is not None:
            for objective_ref in self.objective_reference:
                capability_obj_rel_obj.add_Objective_Reference(objective_ref.to_obj())
        return capability_obj_rel_obj

    def to_dict(self):
        """Return a plain-dict representation of this relationship."""
        capability_obj_rel_dict = {}
        if self.relationship_type is not None: capability_obj_rel_dict['relationship_type'] = self.relationship_type.to_dict()
        if self.objective_reference is not None:
            capability_obj_rel_dict['objective_reference'] = [x.to_dict() for x in self.objective_reference]
        return capability_obj_rel_dict

    @staticmethod
    def from_obj(capability_obj_rel_obj):
        """Build an instance from a binding object; None input yields None."""
        if not capability_obj_rel_obj:
            return None
        capability_obj_rel_ = CapabilityObjectiveRelationship()
        capability_obj_rel_.relationship_type = VocabString.from_obj(capability_obj_rel_obj.get_Relationship_Type())
        if capability_obj_rel_obj.get_Objective_Reference():
            capability_obj_rel_.objective_reference = [CapabilityObjectiveReference.from_obj(x) for x in capability_obj_rel_obj.get_Objective_Reference()]
        return capability_obj_rel_

    @staticmethod
    def from_dict(capability_obj_rel_dict):
        """Build an instance from a dict; None/empty input yields None."""
        if not capability_obj_rel_dict:
            return None
        # Bug fix: the original instantiated CapabilityRelationship here,
        # making this deserializer return the wrong type entirely.
        capability_obj_rel_ = CapabilityObjectiveRelationship()
        # Robustness: use .get (matching CapabilityObjective.from_dict) so a
        # partial dict no longer raises KeyError.
        capability_obj_rel_.relationship_type = VocabString.from_dict(capability_obj_rel_dict.get('relationship_type'))
        if capability_obj_rel_dict.get('objective_reference'):
            capability_obj_rel_.objective_reference = [CapabilityObjectiveReference.from_dict(x) for x in capability_obj_rel_dict.get('objective_reference')]
        return capability_obj_rel_
class CapabilityRelationship(maec.Entity):
    """Typed relationship from a Capability to other Capabilities (held as
    CapabilityReference instances).  Serializes to/from the generateDS
    binding object and a plain dict."""
    _namespace = maec.bundle._namespace
    def __init__(self):
        super(CapabilityRelationship, self).__init__()
        # Controlled-vocabulary string naming the relationship type.
        self.relationship_type = None
        # List of CapabilityReference instances.
        self.capability_reference = []
    def to_obj(self):
        """Return the generateDS binding object for this relationship."""
        capability_rel_obj = bundle_binding.CapabilityRelationshipType()
        if self.relationship_type is not None: capability_rel_obj.set_Relationship_Type(self.relationship_type.to_obj())
        if self.capability_reference is not None:
            for capability_ref in self.capability_reference:
                capability_rel_obj.add_Capability_Reference(capability_ref.to_obj())
        return capability_rel_obj
    def to_dict(self):
        """Return a plain-dict representation of this relationship."""
        capability_rel_dict = {}
        if self.relationship_type is not None: capability_rel_dict['relationship_type'] = self.relationship_type.to_dict()
        if self.capability_reference is not None:
            capability_rel_dict['capability_reference'] = [x.to_dict() for x in self.capability_reference]
        return capability_rel_dict
    @staticmethod
    def from_obj(capability_rel_obj):
        """Build an instance from a binding object; None input yields None."""
        if not capability_rel_obj:
            return None
        capability_rel_ = CapabilityRelationship()
        capability_rel_.relationship_type = VocabString.from_obj(capability_rel_obj.get_Relationship_Type())
        if capability_rel_obj.get_Capability_Reference():
            capability_rel_.capability_reference = [CapabilityReference.from_obj(x) for x in capability_rel_obj.get_Capability_Reference()]
        return capability_rel_
    @staticmethod
    def from_dict(capability_rel_dict):
        """Build an instance from a dict; None/empty input yields None.
        NOTE(review): direct key access raises KeyError on partial dicts,
        unlike the .get pattern used by CapabilityObjective.from_dict."""
        if not capability_rel_dict:
            return None
        capability_rel_ = CapabilityRelationship()
        capability_rel_.relationship_type = VocabString.from_dict(capability_rel_dict['relationship_type'])
        if capability_rel_dict['capability_reference']:
            capability_rel_.capability_reference = [CapabilityReference.from_dict(x) for x in capability_rel_dict['capability_reference']]
        return capability_rel_
class CapabilityObjective(maec.Entity):
    """A strategic or tactical objective of a malware Capability (MAEC v4.1),
    with optional properties, behavior references, and relationships to other
    objectives."""
    _namespace = maec.bundle._namespace
    def __init__(self):
        super(CapabilityObjective, self).__init__()
        # Auto-generated id unless overwritten by a deserializer.
        self.id_ = maec.utils.idgen.create_id(prefix="capability_objective")
        self.name = None
        self.description = None
        self.property = []
        self.behavior_reference = []
        self.relationship = []
    def to_obj(self):
        """Return the generateDS binding object for this objective."""
        capability_objective_obj = bundle_binding.CapabilityObjectiveType()
        if self.id_ is not None: capability_objective_obj.set_id(self.id_)
        if self.name is not None: capability_objective_obj.set_Name(self.name.to_obj())
        if self.description is not None: capability_objective_obj.set_Description(self.description.to_obj())
        if self.property:
            for prop in self.property:
                capability_objective_obj.add_Property(prop.to_obj())
        if self.behavior_reference:
            for behavior_ref in self.behavior_reference:
                capability_objective_obj.add_Behavior_Reference(behavior_ref.to_obj())
        if self.relationship:
            for rel in self.relationship:
                capability_objective_obj.add_Relationship(rel.to_obj())
        return capability_objective_obj
    def to_dict(self):
        """Return a plain-dict representation of this objective.
        NOTE(review): description is stored raw here but serialized via
        .to_obj() in to_obj -- asymmetric; confirm intended."""
        capability_objective_dict = {}
        if self.id_ is not None: capability_objective_dict['id'] = self.id_
        if self.name is not None: capability_objective_dict['name'] = self.name.to_dict()
        if self.description is not None: capability_objective_dict['description'] = self.description
        if self.property:
            capability_objective_dict['property'] = [x.to_dict() for x in self.property]
        if self.behavior_reference:
            capability_objective_dict['behavior_reference'] = [x.to_dict() for x in self.behavior_reference]
        if self.relationship:
            capability_objective_dict['relationship'] = [x.to_dict() for x in self.relationship]
        return capability_objective_dict
    @staticmethod
    def from_obj(capability_objective_obj):
        """Build an instance from a binding object; None input yields None."""
        if not capability_objective_obj:
            return None
        capability_objective_ = CapabilityObjective()
        if capability_objective_obj.get_id(): capability_objective_.id_ = capability_objective_obj.get_id()
        capability_objective_.name = VocabString.from_obj(capability_objective_obj.get_Name())
        capability_objective_.description = capability_objective_obj.get_Description()
        if capability_objective_obj.get_Property():
            capability_objective_.property = [CapabilityProperty.from_obj(x) for x in capability_objective_obj.get_Property()]
        if capability_objective_obj.get_Behavior_Reference():
            capability_objective_.behavior_reference = [BehaviorReference.from_obj(x) for x in capability_objective_obj.get_Behavior_Reference()]
        if capability_objective_obj.get_Relationship():
            capability_objective_.relationship = [CapabilityObjectiveRelationship.from_obj(x) for x in capability_objective_obj.get_Relationship()]
        return capability_objective_
    @staticmethod
    def from_dict(capability_objective_dict):
        """Build an instance from a dict; None/empty input yields None.
        Missing keys are tolerated via .get."""
        if not capability_objective_dict:
            return None
        capability_objective_ = CapabilityObjective()
        if capability_objective_dict.get('id'): capability_objective_.id_ = capability_objective_dict.get('id')
        capability_objective_.name = VocabString.from_dict(capability_objective_dict.get('name'))
        capability_objective_.description = capability_objective_dict.get('description')
        if capability_objective_dict.get('property'):
            capability_objective_.property = [CapabilityProperty.from_dict(x) for x in capability_objective_dict.get('property')]
        if capability_objective_dict.get('behavior_reference'):
            capability_objective_.behavior_reference = [BehaviorReference.from_dict(x) for x in capability_objective_dict.get('behavior_reference')]
        if capability_objective_dict.get('relationship'):
            capability_objective_.relationship = [CapabilityObjectiveRelationship.from_dict(x) for x in capability_objective_dict.get('relationship')]
        return capability_objective_
class CapabilityProperty(maec.Entity):
    """Name/value property attached to a Capability or Capability Objective.
    name is a VocabString, value a cybox String."""
    _namespace = maec.bundle._namespace
    def __init__(self):
        super(CapabilityProperty, self).__init__()
        self.name = None
        self.value = None
    def to_obj(self):
        """Return the generateDS binding object for this property."""
        capability_property_obj = bundle_binding.CapabilityPropertyType()
        if self.name is not None: capability_property_obj.set_Name(self.name.to_obj())
        if self.value is not None: capability_property_obj.set_Value(self.value.to_obj())
        return capability_property_obj
    def to_dict(self):
        """Return a plain-dict representation of this property."""
        capability_property_dict = {}
        if self.name is not None: capability_property_dict['name'] = self.name.to_dict()
        if self.value is not None: capability_property_dict['value'] = self.value.to_dict()
        return capability_property_dict
    @staticmethod
    def from_obj(capability_property_obj):
        """Build an instance from a binding object; None input yields None."""
        if not capability_property_obj:
            return None
        capability_property_ = CapabilityProperty()
        capability_property_.name = VocabString.from_obj(capability_property_obj.get_Name())
        capability_property_.value = String.from_obj(capability_property_obj.get_Value())
        return capability_property_
    @staticmethod
    def from_dict(capability_property_dict):
        """Build an instance from a dict; None/empty input yields None.
        NOTE(review): raises KeyError when 'name' or 'value' is absent."""
        if not capability_property_dict:
            return None
        capability_property_ = CapabilityProperty()
        capability_property_.name = VocabString.from_dict(capability_property_dict['name'])
        capability_property_.value = String.from_dict(capability_property_dict['value'])
        return capability_property_
class Capability(maec.Entity):
    """A malware Capability (MAEC v4.1 Bundle): named high-level ability with
    properties, strategic/tactical objectives, behavior references, and
    relationships to other Capabilities."""
    _namespace = maec.bundle._namespace
    def __init__(self, id = None, name = None):
        # NOTE(review): parameter 'id' shadows the builtin; kept for
        # backward compatibility with existing callers.
        super(Capability, self).__init__()
        if id:
            self.id_ = id
        else:
            self.id_ = maec.utils.idgen.create_id(prefix="capability")
        self.name = name
        self.description = None
        self.property = []
        self.strategic_objective = []
        self.tactical_objective = []
        self.behavior_reference = []
        self.relationship = []
    def add_tactical_objective(self, tactical_objective):
        """Append a CapabilityObjective to the tactical objectives."""
        self.tactical_objective.append(tactical_objective)
    def add_strategic_objective(self, strategic_objective):
        """Append a CapabilityObjective to the strategic objectives."""
        self.strategic_objective.append(strategic_objective)
    def to_obj(self):
        """Return the generateDS binding object for this capability."""
        capability_obj = bundle_binding.CapabilityType()
        if self.id_ is not None: capability_obj.set_id(self.id_)
        if self.name is not None: capability_obj.set_name(self.name)
        if self.description is not None: capability_obj.set_Description(self.description)
        if self.property:
            for prop in self.property:
                capability_obj.add_Property(prop.to_obj())
        if self.strategic_objective:
            for strategic_obj in self.strategic_objective:
                capability_obj.add_Strategic_Objective(strategic_obj.to_obj())
        if self.tactical_objective:
            for tactical_obj in self.tactical_objective:
                capability_obj.add_Tactical_Objective(tactical_obj.to_obj())
        if self.behavior_reference:
            for behavior_ref in self.behavior_reference:
                capability_obj.add_Behavior_Reference(behavior_ref.to_obj())
        if self.relationship:
            for rel in self.relationship:
                capability_obj.add_Relationship(rel.to_obj())
        return capability_obj
    def to_dict(self):
        """Return a plain-dict representation of this capability."""
        capability_dict = {}
        if self.id_ is not None: capability_dict['id'] = self.id_
        if self.name is not None: capability_dict['name'] = self.name
        if self.description is not None: capability_dict['description'] = self.description
        if self.property:
            capability_dict['property'] = [x.to_dict() for x in self.property]
        if self.strategic_objective:
            capability_dict['strategic_objective'] = [x.to_dict() for x in self.strategic_objective]
        if self.tactical_objective:
            capability_dict['tactical_objective'] = [x.to_dict() for x in self.tactical_objective]
        if self.behavior_reference:
            capability_dict['behavior_reference'] = [x.to_dict() for x in self.behavior_reference]
        if self.relationship:
            capability_dict['relationship'] = [x.to_dict() for x in self.relationship]
        return capability_dict
    @staticmethod
    def from_dict(capability_dict):
        """Build an instance from a dict; None/empty input yields None.
        Missing keys are tolerated via .get."""
        if not capability_dict:
            return None
        capability_ = Capability()
        if capability_dict.get('id'): capability_.id_ = capability_dict.get('id')
        capability_.name = capability_dict.get('name')
        capability_.description = capability_dict.get('description')
        if capability_dict.get('property'):
            capability_.property = [CapabilityProperty.from_dict(x) for x in capability_dict.get('property')]
        if capability_dict.get('strategic_objective'):
            capability_.strategic_objective = [CapabilityObjective.from_dict(x) for x in capability_dict.get('strategic_objective')]
        if capability_dict.get('tactical_objective'):
            capability_.tactical_objective = [CapabilityObjective.from_dict(x) for x in capability_dict.get('tactical_objective')]
        if capability_dict.get('behavior_reference'):
            capability_.behavior_reference = [BehaviorReference.from_dict(x) for x in capability_dict.get('behavior_reference')]
        if capability_dict.get('relationship'):
            capability_.relationship = [CapabilityRelationship.from_dict(x) for x in capability_dict.get('relationship')]
        return capability_
    @staticmethod
    def from_obj(capability_obj):
        """Build an instance from a binding object; None input yields None."""
        if not capability_obj:
            return None
        capability_ = Capability()
        if capability_obj.get_id(): capability_.id_ = capability_obj.get_id()
        capability_.name = capability_obj.get_name()
        capability_.description = capability_obj.get_Description()
        if capability_obj.get_Property():
            capability_.property = [CapabilityProperty.from_obj(x) for x in capability_obj.get_Property()]
        if capability_obj.get_Strategic_Objective():
            capability_.strategic_objective = [CapabilityObjective.from_obj(x) for x in capability_obj.get_Strategic_Objective()]
        if capability_obj.get_Tactical_Objective():
            capability_.tactical_objective = [CapabilityObjective.from_obj(x) for x in capability_obj.get_Tactical_Objective()]
        if capability_obj.get_Behavior_Reference():
            capability_.behavior_reference = [BehaviorReference.from_obj(x) for x in capability_obj.get_Behavior_Reference()]
        if capability_obj.get_Relationship():
            capability_.relationship = [CapabilityRelationship.from_obj(x) for x in capability_obj.get_Relationship()]
        return capability_
class CapabilityList(maec.Entity):
    """Container holding Capability definitions and/or references to
    Capabilities defined elsewhere (MAEC v4.1 Bundle)."""
    _namespace = maec.bundle._namespace
    def __init__(self):
        super(CapabilityList, self).__init__()
        self.capability = []
        self.capability_reference = []
    def to_obj(self):
        """Return the generateDS binding object for this list."""
        capability_list_obj = bundle_binding.CapabilityListType()
        if self.capability:
            for cap in self.capability:
                capability_list_obj.add_Capability(cap.to_obj())
        if self.capability_reference:
            for cap_ref in self.capability_reference:
                capability_list_obj.add_Capability_Reference(cap_ref.to_obj())
        return capability_list_obj
    def to_dict(self):
        """Return a plain-dict representation of this list."""
        capability_list_dict = {}
        if self.capability:
            capability_list_dict['capability'] = [x.to_dict() for x in self.capability]
        if self.capability_reference:
            capability_list_dict['capability_reference'] = [x.to_dict() for x in self.capability_reference]
        return capability_list_dict
    @staticmethod
    def from_obj(capability_list_obj):
        """Build an instance from a binding object; None input yields None."""
        if not capability_list_obj:
            return None
        capability_list_ = CapabilityList()
        if capability_list_obj.get_Capability():
            capability_list_.capability = [Capability.from_obj(x) for x in capability_list_obj.get_Capability()]
        if capability_list_obj.get_Capability_Reference():
            capability_list_.capability_reference = [CapabilityReference.from_obj(x) for x in capability_list_obj.get_Capability_Reference()]
        return capability_list_
    @staticmethod
    def from_dict(capability_list_dict):
        """Build an instance from a dict; None/empty input yields None."""
        if not capability_list_dict:
            return None
        capability_list_ = CapabilityList()
        if capability_list_dict.get('capability'):
            capability_list_.capability = [Capability.from_dict(x) for x in capability_list_dict['capability']]
        if capability_list_dict.get('capability_reference'):
            capability_list_.capability_reference = [CapabilityReference.from_dict(x) for x in capability_list_dict['capability_reference']]
        return capability_list_
"[email protected]"
] | |
388a6eb4b8b486a5c9c706692097b3b4c38187c7 | 8acffb8c4ddca5bfef910e58d3faa0e4de83fce8 | /ml-flask/Lib/site-packages/pandas/_config/display.py | 57b7af184346cd2f68442d22a2bd7a489047ecad | [
"MIT"
] | permissive | YaminiHP/SimilitudeApp | 8cbde52caec3c19d5fa73508fc005f38f79b8418 | 005c59894d8788c97be16ec420c0a43aaec99b80 | refs/heads/master | 2023-06-27T00:03:00.404080 | 2021-07-25T17:51:27 | 2021-07-25T17:51:27 | 389,390,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:f6ba130797f4f1ce2395562efa48f788ebd3a352e26f7c79209f476a3d300866
size 1756
| [
"[email protected]"
] | |
5705fd2fedee9caeaeaa41e9e65f89a975c95792 | 727f1bc2205c88577b419cf0036c029b8c6f7766 | /out-bin/py/google/fhir/models/model_test.runfiles/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/layers/convolutional.py | 1688b79891c2bcd3cce1b6bb7355c216736014a3 | [
"Apache-2.0"
] | permissive | rasalt/fhir | 55cf78feed3596a3101b86f9e9bbf6652c6ed4ad | d49883cc4d4986e11ca66058d5a327691e6e048a | refs/heads/master | 2020-04-13T00:16:54.050913 | 2019-01-15T14:22:15 | 2019-01-15T14:22:15 | 160,260,223 | 0 | 0 | Apache-2.0 | 2018-12-03T22:07:01 | 2018-12-03T22:07:01 | null | UTF-8 | Python | false | false | 182 | py | /home/rkharwar/.cache/bazel/_bazel_rkharwar/c4bcd65252c8f8250f091ba96375f9a5/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/layers/convolutional.py | [
"[email protected]"
] | |
7248ab453e1a86b06b69a7d02263f0431915da01 | ac01dec84e77323a9c67439f92bf3a9f1a496e61 | /django_app/motif/apps.py | 22b8a8dfd1b0066ff4cb659b0007eb98dbb7d374 | [] | no_license | Monaegi/Julia-WordyGallery | d8c970e8bd25d7cad69254a876a216fecf97e367 | 4031afe1b5d45865a61f4ff4136a8314258a917a | refs/heads/master | 2021-01-23T16:18:09.876372 | 2017-10-13T08:08:25 | 2017-10-13T08:08:25 | 102,736,537 | 1 | 0 | null | 2017-10-13T08:08:26 | 2017-09-07T12:51:47 | Python | UTF-8 | Python | false | false | 85 | py | from django.apps import AppConfig
class MotifConfig(AppConfig):
    """Django application configuration for the ``motif`` app."""
    name = 'motif'  # dotted module path Django uses to identify this app
| [
"[email protected]"
] | |
4592366353bb1a72dfd875e0dfdbd622612baa2b | ef84f06e845d5c42aae2faee84c263f9eb42d92d | /keen/web/views/api/user.py | 46bd2b750294c76a1ca60d1ba6b84a5b3139654b | [] | no_license | beforebeta/keensmb | 0921473df4e92e366695cc03c9fdef96a3be4075 | 5408a42a16c83558229f62c88eec011231a0a797 | refs/heads/master | 2016-09-16T00:37:08.907191 | 2014-03-24T15:31:11 | 2014-03-24T15:31:11 | 14,530,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,726 | py | import logging
from uuid import uuid1

from django.contrib.auth import authenticate, login, logout
from django.core.urlresolvers import reverse
from django.db import DatabaseError
from django.http import HttpResponseBadRequest, HttpResponseRedirect
from django.views.decorators.csrf import ensure_csrf_cookie

from rest_framework.decorators import api_view
from rest_framework.response import Response

from keen.core.models import ClientUser
from keen.tasks import send_email, mailchimp_subscribe
from keen.web.forms import TrialRequestForm
from keen.web.models import TrialRequest
from keen.web.serializers import ClientSerializer
from tracking.models import Visitor
logger = logging.getLogger(__name__)
@ensure_csrf_cookie
@api_view(['POST'])
def login_view(request):
    """Authenticate a user from POSTed ``email``/``password``.

    On success the user is logged in, the slug of the associated client (if
    any) is cached on the session, and a success message is returned.  On
    failure an error message is returned; missing parameters yield HTTP 400.
    """
    try:
        email = request.DATA['email']
        password = request.DATA['password']
    except KeyError:
        # logger.warn is a deprecated alias of logger.warning
        logger.warning('Request is missing email and/or password parameters: %r' % request.DATA)
        return HttpResponseBadRequest('Missing authentication information')

    user = authenticate(username=email, password=password)
    # Security fix: do not log locals() here -- it wrote the plaintext
    # password into the log. Log only the identifier being authenticated.
    logger.debug('Authentication attempt for %r', email)
    if user:
        login(request, user)
        try:
            # Cache the client slug so later views can skip this lookup.
            request.session['client_slug'] = ClientUser.objects.get(
                user=user).client.slug
        except ClientUser.DoesNotExist:
            request.session['client_slug'] = None
        request.session.save()
        return Response({'success': 'Thank you for signing-in!'})

    # Bug fix: previously the success response was returned unconditionally
    # after the if/else, so this error branch was unreachable and failed
    # logins were reported as successful.
    request.session.save()
    return Response({'error': 'Invalid e-mail/pasword combination'})
@ensure_csrf_cookie
@api_view(['GET'])
def logout_view(request):
    """Log the current user out and redirect to the home page."""
    request.session.pop('client_slug', None)  # drop the cached client association
    logout(request)
    return HttpResponseRedirect(reverse('home'))
@ensure_csrf_cookie
@api_view(['POST'])
def request_free_trial(request):
    """Handle a free-trial sign-up form submission.

    Validates the form, persists a :class:`TrialRequest` (best effort),
    subscribes the address to the MailChimp list and notifies sales by
    e-mail.  Returns the form errors when validation fails.
    """
    form = TrialRequestForm(request.DATA)
    if form.is_valid():
        trial_request = TrialRequest(**form.cleaned_data)
        trial_request.source = request.session.get('landing_page')
        if 'visitor' in request.session:
            try:
                trial_request.visitor = Visitor.objects.get(
                    pk=request.session['visitor'])
            except Visitor.DoesNotExist:
                logger.error('Visitor does not exist')
        try:
            trial_request.save()
        except DatabaseError:
            # Bug fix: DatabaseError was never imported, so this handler
            # itself raised a NameError; it is now imported from django.db.
            logger.exception('Failed to save free trial request')
            # FIXME: should we return an error?
            # for now lets pretend all went well
        # Fall back to a unique throw-away address so MailChimp still gets a row.
        email = trial_request.email or 'ignore+{0}@keensmb.com'.format(uuid1().hex)
        mailchimp_subscribe.delay(
            'aba1a09617',
            email,
            {
                'EMAIL': email,
                'NAME': trial_request.name or '',
                'BIZNAME': trial_request.business or '',
                'NUMBER': trial_request.phone or '',
                'REFERRAL': trial_request.question or '',
                'QUESTIONS': trial_request.comments or '',
            },
            double_optin=False,
            update_existing=True,
            send_welcome=False,
        )
        send_email.delay(
            'Free Trial Request',
            '''
            Name: {0.name}
            Business name: {0.business}
            Phone number: {0.phone}
            Email: {0.email}
            Referral: {0.question}
            Questions: {0.comments}
            '''.format(trial_request),
            ['[email protected]'],
        )
        result = {
            'success': 'We will be in touch shortly',
        }
    else:
        result = {
            'errors': form.errors,
        }
    return Response(result)
| [
"[email protected]"
] | |
e2070525c866d5c13ea17979887ad320706aefe9 | b3e7a3d1e5d50af82b60e6d7b8afa4a077a040ad | /main2.py | 5f83b2d48ceebcd188e66f2ed0f7efb1c605281a | [] | no_license | Namenaro/cheini | d317fb0a6396bf038629490231a175c62e2e6011 | 3b14b58030d1f910265da8c1b859742149df4f6f | refs/heads/master | 2021-05-10T12:30:26.042569 | 2018-02-18T20:03:49 | 2018-02-18T20:03:49 | 118,442,741 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,941 | py | # -*- coding: utf-8 -*
import itertools
import one_experiment_report
import utils
import simple_nets
from math import floor, ceil
import matplotlib.pyplot as plt
import numpy as np
import os
import _pickle as pickle
from keras.callbacks import EarlyStopping
from keras.callbacks import TensorBoard
from keras import optimizers
import time
from reportlab.lib.enums import TA_JUSTIFY
from reportlab.lib.pagesizes import letter
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Image
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.units import cm
from keras import losses
# варьируем один (или несколько) гиперпараметр - проводим таким образом серию экспериментов,
# результаты серии сводим в единый отчет: таблица из 2 столбцов (что вариьровали) и (за чем следили)
#one_experiment_report.main()
class Serial:
    """One series of experiments.

    Every hyperparameter is stored as a list of candidate values; the series
    runs one Experiment per element of the cartesian product of those lists.
    """
    def __init__(self, dataset, dataset_name='default'):
        # NOTE: attribute insertion order matters -- it fixes the key order
        # used when the cartesian product is built in _get_all_cominations().
        self.batch_size = [3]
        self.code_len = [2]
        self.wb_koef_reg = [0.]
        self.num_epochs = [2200]
        self.drop_in_decoder = [0.0]
        self.drop_in_encoder = [0.0]
        self.activation = ['linear']
        self.dataset = dataset
        self.dataset_name = [dataset_name]

    def _get_all_cominations(self):
        """Return a list of dicts, one per combination of hyperparameter values."""
        grid = self.__dict__
        names = list(grid.keys())
        value_lists = [grid[name] for name in names]
        return [dict(zip(names, values))
                for values in itertools.product(*value_lists)]

    def make_experiments(self, folder_name=None):
        """Run every experiment of the series and return the list of summaries.

        Also writes a combined PDF report ("seria_report.pdf") into the
        series folder.
        """
        combos = self._get_all_cominations()
        print("NUM EXPERIMENTS EXPECTED: " + str(len(combos)))
        outer_story = []
        summaries = []
        if folder_name is None:
            folder_name = utils.ask_user_for_name()  # pick a name for the series
            if folder_name is None:
                exit()
        utils.setup_folder_for_results(folder_name)
        series_dir = os.getcwd()
        for experiment_id, params in enumerate(combos):
            utils.setup_folder_for_results(str(experiment_id))  # per-experiment folder
            experiment = Experiment(params)
            summary = experiment.run_it(outer_story=outer_story,
                                        name_of_experiment="experiment_" + str(experiment_id))
            summary['experiment_name'] = experiment_id
            summaries.append({**params, **summary})
            os.chdir(series_dir)  # back to the series folder
        doc = SimpleDocTemplate("seria_report.pdf", pagesize=letter,
                                rightMargin=72, leftMargin=72,
                                topMargin=72, bottomMargin=18)
        doc.build(outer_story)
        return summaries
from keras.regularizers import Regularizer
from keras import backend as K
class ActivityRegularizer(Regularizer):
    # NOTE(review): as written this regulariser is a no-op -- __call__ always
    # returns 0, so nothing is added to the training loss; the real penalty
    # terms are kept commented out below. Presumably left this way on purpose
    # for the experiment -- confirm before re-enabling.
    def __init__(self, l1=0., l2=0.):
        # Weights for the (currently disabled) L1/L2 activity penalties.
        self.l1 = l1
        self.l2 = l2
    def __call__(self,x):
        loss = 0
        #loss += self.l1 * K.sum(K.mean(K.abs(x), axis=0))
        #loss += self.l2 * K.sum(K.mean(K.square(x), axis=0))
        # The three slices below are computed but never used; apparently
        # leftovers from an experiment with a per-unit penalty.
        p1 = x[0]
        p2 = x[1]
        p3 = x[2]
        loss = 0
        return 0
    def get_config(self):
        # Serialisation hook so Keras can save/restore the regulariser config.
        return {"name": self.__class__.__name__,
                "l1": self.l1,
                "l2": self.l2}
class Experiment:
    """A single autoencoder training run, configured entirely by one
    hyperparameter dict produced by Serial._get_all_cominations()."""
    def __init__(self, dictionary):
        # Every hyperparameter becomes an instance attribute (self.code_len, ...).
        for k, v in dictionary.items():
            setattr(self, k, v)
    def run_it(self, outer_story, name_of_experiment):
        """Train the autoencoder, extend *outer_story* with the report pieces
        and return the summary dict.  Saves model files into the current
        working directory as a side effect."""
        print("RUN: " + str(self.__dict__))
        # Pull the dataset out of the file (self.dataset is presumably a
        # single 'foveas.pkl' path -- confirm against utils.get_dataset).
        foveas01 = utils.get_dataset(self.dataset)
        a_regulariser = ActivityRegularizer(l1=0., l2=0.)
        # Build the encoder/decoder/autoencoder triple and train it.
        en, de, ae = simple_nets.create_ae_YANA(encoding_dim=self.code_len,
                                                input_data_shape=foveas01[0].shape,
                                                activity_regulariser=a_regulariser,
                                                koef_reg=self.wb_koef_reg,
                                                activation_on_code=self.activation,
                                                drop_in_decoder=self.drop_in_decoder,
                                                drop_in_encoder=self.drop_in_encoder)
        sgd = optimizers.SGD(lr=0.02, decay=1e-6, momentum=0.9, nesterov=True)
        ae.compile(optimizer=sgd, loss=losses.mean_squared_error)
        early_stopping = EarlyStopping(monitor='val_loss', min_delta=0, patience=0, verbose=0, mode='auto')
        # Train on the data itself (autoencoder target == input); validation
        # uses the same set, so val_loss mirrors the training fit.
        history = ae.fit(foveas01, foveas01,
                         epochs=self.num_epochs,
                         #batch_size=ceil(len(foveas01) / 2),
                         batch_size=self.batch_size,
                         shuffle=False,
                         validation_data=(foveas01, foveas01),
                         callbacks=[early_stopping])
        # Generate the per-experiment report from the training results.
        report = one_experiment_report.ReportOnPath(ae=ae, en=en, de=de,
                                                    dataset=foveas01,
                                                    history_obj=history,
                                                    name_of_experiment=self.dataset + "__" + name_of_experiment
                                                    )
        report.create_summary()
        summary, exp_outer_story = report.end()
        outer_story += exp_outer_story
        utils.save_all(encoder=en, decoder=de, autoencoder=ae)
        return summary
def make_seria_on_dataset(dataset, name_of_seria=None):
    """Run a full experiment series on *dataset* under a "SERIES" folder.

    Persists the list of summaries to 'summaries_dicts.pkl' and the series
    settings to 'settings.txt', then restores the original working directory.
    """
    old_dir = os.getcwd()
    try:
        utils.setup_folder_for_results("SERIES")
        s = Serial(dataset)
        summaries = s.make_experiments(folder_name=name_of_seria)
        # Fix: the pickle file was previously opened inline and never closed.
        with open("summaries_dicts.pkl", "wb") as pkl_file:
            pickle.dump(summaries, pkl_file)
        print("summaries is saved into: " + os.getcwd())
        with open("settings.txt", "w") as text_file:
            text_file.write(str(s.__dict__))
    finally:
        # Restore the cwd even if an experiment raises, so callers are not
        # left inside the results folder.
        os.chdir(old_dir)
def get_dataset(a_dir):
    """Return the 'foveas.pkl' path for every immediate subdirectory of *a_dir*.

    Non-directory entries are ignored; order follows os.listdir().
    """
    paths = []
    for entry in os.listdir(a_dir):
        candidate = os.path.join(a_dir, entry)
        if os.path.isdir(candidate):
            paths.append(os.path.join(candidate, 'foveas.pkl'))
    return paths
def learn_models_on_dataset(folder_with_dataset, name_for_experiment):
    """Collect every per-sample 'foveas.pkl' under *folder_with_dataset* and
    run a full experiment series on them under *name_for_experiment*."""
    dataset = get_dataset(folder_with_dataset)
    make_seria_on_dataset(dataset, name_for_experiment)
if __name__ == "__main__":
    # Entry point: train the autoencoder series on the local 7x7 dataset.
    directory = 'C:\\Users\\neuro\\PycharmProjects\\cheini\\partial\\7x7'
    learn_models_on_dataset(folder_with_dataset=directory,
                            name_for_experiment='7x7 last ITOG')
    #directory1 = 'C:\\Users\\neuro\\PycharmProjects\\cheini\\partial\\7x7'
    # dataset1 = get_dataset(directory1)
    #make_seria_on_dataset(dataset1, "ITOG 7x7 partial_")
| [
"[email protected]"
] | |
06005fb2c3ba90f593ed444f209cd6a808e3114b | 907cb7612ede31418997ce7b2813c9f2192e6a30 | /phase_cells/focal_evaluate/printout_network.py | a7d12f6f8ac0762947ad20ae88fc7d697979018f | [
"MIT"
] | permissive | shenghh2015/segmentation_models | c3a6f9f0a7fc2ac52d0d1f6b2beef1c69133bae2 | 473c528c724f62ff38ac127747dd8babb7de6b85 | refs/heads/master | 2023-08-14T05:52:36.290536 | 2021-10-19T03:02:46 | 2021-10-19T03:02:46 | 276,793,700 | 0 | 0 | null | 2020-07-03T02:57:39 | 2020-07-03T02:57:38 | null | UTF-8 | Python | false | false | 648 | py | import os
import sys
sys.path.append('../')  # make the local segmentation_models package importable
import segmentation_models as sm
from segmentation_models import Unet
os.environ["CUDA_VISIBLE_DEVICES"] = '1'  # pin execution to GPU 1
backbone = 'efficientnetb4'
# Build the U-Net only to introspect its layer names/shapes -- no training.
model = Unet(backbone, input_shape = (736,736,3))
network_layers = model.layers
# Encoder activations of interest; presumably the skip-connection taps -- confirm.
feature_layers = ['block6a_expand_activation', 'block4a_expand_activation','block3a_expand_activation', 'block2a_expand_activation']
# Dump "<layer name>: <output shape>" for every layer, marking the taps.
with open('network_{}.txt'.format(backbone), 'w+') as f:
	for layer in network_layers:
		f.write('{}: {}\n'.format(layer.name, layer.output.get_shape()))
		if layer.name in feature_layers:
			f.write('\nFeature extansion ---{}\n'.format(layer.name))
| [
"[email protected]"
] | |
b99727124520efc1555a5d51225f48be9156a9ec | d8e4dece3a4c35c30ec6a90f6dc7bcf4ff43b4b4 | /searcher/server/query.py | 7e74d75d9f400b0199527b5f3e37b231f9a95987 | [] | no_license | wikty/MiniSearchEngine | c17160a9b65f462fa0690723aa860c6092dea97e | 63f7ef576f48b780fb8cf7fd3f6d955bc0037efd | refs/heads/master | 2021-01-13T02:44:39.591042 | 2018-09-13T06:40:47 | 2018-09-13T06:40:47 | 77,355,791 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 268 | py | from searcher.indexer.pipelines import Pipeline
from .ranker import rank
from .extractor import extract
def process(db, query):
    """Answer *query* against *db*: preprocess into terms, fetch the matching
    document info, rank it and extract the presentable result list."""
    terms, _ = Pipeline.preprocess(query)
    ranked = rank(db, db.get_doc_info(terms), terms)
    return extract(ranked)
"[email protected]"
] | |
3a40a1e42f60c1c9f14a8869461d90cc62d7f560 | 60eb98538025c61cf94a91f6c96f9ee81dcd3fdf | /tests/test_rand_affine.py | 1e1a23bc0915f7025bb7fdc388ed9593b196b866 | [
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] | permissive | gagandaroach/MONAI | 167e7746995d4b6136731881e22ad4df333b16a9 | 79b83d9fac41efae9b90ed2f9ad078d6d664bf64 | refs/heads/master | 2023-06-02T19:54:47.737846 | 2021-06-24T18:34:02 | 2021-06-24T18:34:02 | 270,741,899 | 0 | 0 | Apache-2.0 | 2020-06-08T16:29:32 | 2020-06-08T16:29:31 | null | UTF-8 | Python | false | false | 5,638 | py | # Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import torch
from parameterized import parameterized
from monai.transforms import RandAffine
TEST_CASES = [
[
dict(as_tensor_output=False, device=None),
{"img": torch.arange(27).reshape((3, 3, 3))},
np.arange(27).reshape((3, 3, 3)),
],
[
dict(as_tensor_output=False, device=None, spatial_size=-1),
{"img": torch.arange(27).reshape((3, 3, 3))},
np.arange(27).reshape((3, 3, 3)),
],
[
dict(as_tensor_output=False, device=None),
{"img": torch.arange(27).reshape((3, 3, 3)), "spatial_size": (2, 2)},
np.array([[[2.0, 3.0], [5.0, 6.0]], [[11.0, 12.0], [14.0, 15.0]], [[20.0, 21.0], [23.0, 24.0]]]),
],
[
dict(as_tensor_output=True, device=None),
{"img": torch.ones((1, 3, 3, 3)), "spatial_size": (2, 2, 2)},
torch.ones((1, 2, 2, 2)),
],
[
dict(as_tensor_output=True, device=None, spatial_size=(2, 2, 2), cache_grid=True),
{"img": torch.ones((1, 3, 3, 3))},
torch.ones((1, 2, 2, 2)),
],
[
dict(
prob=0.9,
rotate_range=(np.pi / 2,),
shear_range=[1, 2],
translate_range=[2, 1],
as_tensor_output=True,
padding_mode="zeros",
spatial_size=(2, 2, 2),
device=None,
),
{"img": torch.ones((1, 3, 3, 3)), "mode": "bilinear"},
torch.tensor([[[[0.3658, 1.0000], [1.0000, 1.0000]], [[1.0000, 1.0000], [1.0000, 0.9333]]]]),
],
[
dict(
prob=0.9,
rotate_range=(np.pi / 2,),
shear_range=[1, 2],
translate_range=[2, 1],
as_tensor_output=True,
padding_mode="zeros",
spatial_size=(2, 2, 2),
cache_grid=True,
device=None,
),
{"img": torch.ones((1, 3, 3, 3)), "mode": "bilinear"},
torch.tensor([[[[0.3658, 1.0000], [1.0000, 1.0000]], [[1.0000, 1.0000], [1.0000, 0.9333]]]]),
],
[
dict(
prob=0.9,
rotate_range=(np.pi / 2,),
shear_range=[1, 2],
translate_range=[2, 1],
scale_range=[0.1, 0.2],
as_tensor_output=True,
device=None,
),
{"img": torch.arange(64).reshape((1, 8, 8)), "spatial_size": (3, 3)},
torch.tensor([[[18.7362, 15.5820, 12.4278], [27.3988, 24.2446, 21.0904], [36.0614, 32.9072, 29.7530]]]),
],
[
dict(
prob=0.9,
rotate_range=(np.pi / 2,),
shear_range=[1, 2],
translate_range=[2, 1],
scale_range=[0.1, 0.2],
spatial_size=(3, 3),
cache_grid=True,
as_tensor_output=True,
device=None,
),
{"img": torch.arange(64).reshape((1, 8, 8))},
torch.tensor([[[18.7362, 15.5820, 12.4278], [27.3988, 24.2446, 21.0904], [36.0614, 32.9072, 29.7530]]]),
],
]
# One (1, 9, 10) test image in numpy and torch form, plus every combination of
# (image type, as_tensor_output, in_dtype_is_int) for the consistency test below.
ARR_NUMPY = np.arange(9 * 10).reshape(1, 9, 10)
ARR_TORCH = torch.Tensor(ARR_NUMPY)
TEST_CASES_SKIPPED_CONSISTENCY = []
for im in (ARR_NUMPY, ARR_TORCH):
    for as_tensor_output in (True, False):
        for in_dtype_is_int in (True, False):
            TEST_CASES_SKIPPED_CONSISTENCY.append((im, as_tensor_output, in_dtype_is_int))
class TestRandAffine(unittest.TestCase):
    """Unit tests for monai.transforms.RandAffine."""
    @parameterized.expand(TEST_CASES)
    def test_rand_affine(self, input_param, input_data, expected_val):
        # Fixed random state makes the randomised transform reproducible.
        g = RandAffine(**input_param)
        g.set_random_state(123)
        result = g(**input_data)
        if input_param.get("cache_grid", False):
            self.assertTrue(g._cached_grid is not None)
        # Output container type must match the expectation (tensor vs ndarray).
        self.assertEqual(isinstance(result, torch.Tensor), isinstance(expected_val, torch.Tensor))
        if isinstance(result, torch.Tensor):
            np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4)
        else:
            np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4)
    def test_ill_cache(self):
        # Grid caching without a usable fixed spatial size should warn.
        with self.assertWarns(UserWarning):
            RandAffine(cache_grid=True)
        with self.assertWarns(UserWarning):
            RandAffine(cache_grid=True, spatial_size=(1, 1, -1))
    @parameterized.expand(TEST_CASES_SKIPPED_CONSISTENCY)
    def test_skipped_transform_consistency(self, im, as_tensor_output, in_dtype_is_int):
        # prob=0 (always skipped) and prob=1 (always applied) must agree on
        # the output container type and dtype.
        t1 = RandAffine(prob=0, as_tensor_output=as_tensor_output)
        t2 = RandAffine(prob=1, spatial_size=(10, 11), as_tensor_output=as_tensor_output)
        # change dtype to int32 or float32
        if in_dtype_is_int:
            im = im.astype("int32") if isinstance(im, np.ndarray) else im.int()
        else:
            im = im.astype("float32") if isinstance(im, np.ndarray) else im.float()
        out1 = t1(im)
        out2 = t2(im)
        # check same type
        self.assertEqual(type(out1), type(out2))
        # check matching dtype
        self.assertEqual(out1.dtype, out2.dtype)
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
| [
"[email protected]"
] | |
5b3b3aa4586919012b05a07fefa8087dd34de097 | d0d45247209d3eabc1cb6bc0b01a8c23f807820d | /tests/test_utility.py | 8c0fd5031a9c46032e233084a2dbabffcb1e5ae4 | [
"MIT"
] | permissive | yw5aj/trimesh | 2b102c5e265108ebd089023bb1c32b3217c35059 | f7dc490f7431ced7cc121369e96b9b2eeb17490d | refs/heads/master | 2021-01-20T03:25:26.772416 | 2017-04-27T16:15:10 | 2017-04-27T16:15:10 | 89,539,048 | 0 | 0 | null | 2017-04-27T00:37:43 | 2017-04-27T00:37:43 | null | UTF-8 | Python | false | false | 8,140 | py | import trimesh
import unittest
import logging
import time
import os
import sys
import inspect
import numpy as np
import json
from collections import deque
import generic as g
# Shared fixtures and tolerances for the test classes below.
TEST_DIM = (100, 3)  # shape of the random point/vector sets used throughout
TOL_ZERO = 1e-9  # tolerance treated as numerically zero
TOL_CHECK = 1e-2  # looser tolerance for comparison against external reference data
log = logging.getLogger('trimesh')
log.addHandler(logging.NullHandler())
_QUICK = '-q' in sys.argv  # presumably a fast-run flag; appears unused in this chunk
class VectorTests(unittest.TestCase):
    """Tests for trimesh's unit-vector and rigid-alignment helpers."""
    def setUp(self):
        self.test_dim = TEST_DIM
    def test_unitize_multi(self):
        """unitize() must flag the zero vector invalid and normalize the rest."""
        vectors = np.ones(self.test_dim)
        vectors[0] = [0, 0, 0]
        vectors, valid = trimesh.unitize(vectors, check_valid=True)
        self.assertFalse(valid[0])
        self.assertTrue(np.all(valid[1:]))
        # NOTE(review): a Euclidean norm would use ** 0.5; for unit vectors
        # (sum of squares == 1) the check is equivalent either way.
        length = np.sum(vectors[1:] ** 2, axis=1) ** 2
        length_check = np.abs(length - 1.0) < TOL_ZERO
        self.assertTrue(np.all(length_check))
    def test_align(self):
        """align_vectors() must rotate random unit vectors onto +Z."""
        log.info('Testing vector alignment')
        target = np.array([0, 0, 1])
        for i in range(100):
            vector = trimesh.unitize(np.random.random(3) - .5)
            T = trimesh.geometry.align_vectors(vector, target)
            result = np.dot(T, np.append(vector, 1))[0:3]
            aligned = np.abs(result - target).sum() < TOL_ZERO
            self.assertTrue(aligned)
    def test_horn(self):
        """absolute_orientation() must recover a random rigid transform."""
        log.info('Testing absolute orientation')
        for i in range(10):
            points_A = (np.random.random(self.test_dim) - .5) * 100
            angle = 4 * np.pi * (np.random.random() - .5)
            vector = trimesh.unitize(np.random.random(3) - .5)
            offset = 100 * (np.random.random(3) - .5)
            T = trimesh.transformations.rotation_matrix(angle, vector)
            T[0:3, 3] = offset
            points_B = trimesh.transformations.transform_points(points_A, T)
            M, error = trimesh.points.absolute_orientation(
                points_A, points_B, return_error=True)
            self.assertTrue(np.all(error < TOL_ZERO))
class UtilTests(unittest.TestCase):
    """Tests for assorted trimesh.util helpers."""
    def test_track(self):
        """tracked_array md5 must change on writes and stay stable on reads."""
        a = trimesh.util.tracked_array(np.random.random(TEST_DIM))
        modified = deque()
        modified.append(int(a.md5(), 16))
        a[0][0] = 10
        modified.append(int(a.md5(), 16))
        a[1] = 5
        modified.append(int(a.md5(), 16))
        a[2:] = 2
        modified.append(int(a.md5(), 16))
        self.assertTrue((np.diff(modified) != 0).all())
        modified = deque()
        modified.append(int(a.md5(), 16))
        # b and c are intentionally unused: reads must NOT alter the hash.
        b = a[[0, 1, 2]]
        modified.append(int(a.md5(), 16))
        c = a[1:]
        modified.append(int(a.md5(), 16))
        self.assertTrue((np.diff(modified) == 0).all())
    def test_bounds_tree(self):
        """An rtree built from AABBs must find each box by its own bounds."""
        for attempt in range(3):
            for dimension in [2, 3]:
                t = g.np.random.random((1000, 3, dimension))
                bounds = g.np.column_stack((t.min(axis=1), t.max(axis=1)))
                tree = g.trimesh.util.bounds_tree(bounds)
                self.assertTrue(0 in tree.intersection(bounds[0]))
    def test_strips(self):
        '''
        Test our conversion of triangle strips to face indexes.
        '''
        # test 4- triangle strip
        s = [g.np.arange(6)]
        f = g.trimesh.util.triangle_strips_to_faces(s)
        assert (f == g.np.array([[0, 1, 2],
                                 [3, 2, 1],
                                 [2, 3, 4],
                                 [5, 4, 3]])).all()
        assert len(f) + 2 == len(s[0])
        # test single triangle
        s = [g.np.arange(3)]
        f = g.trimesh.util.triangle_strips_to_faces(s)
        assert (f == g.np.array([[0, 1, 2]])).all()
        assert len(f) + 2 == len(s[0])
        s = [g.np.arange(100)]
        f = g.trimesh.util.triangle_strips_to_faces(s)
        assert len(f) + 2 == len(s[0])
class SceneTests(unittest.TestCase):
    """Smoke test for building a Scene from a split mesh."""
    def setUp(self):
        filename = os.path.join(g.dir_models, 'box.STL')
        mesh = trimesh.load(filename)
        split = mesh.split()
        scene = trimesh.scene.Scene(split)
        self.scene = scene
    def test_scene(self):
        # Smoke test only: just checks duplicate_nodes() runs without raising.
        duplicates = self.scene.duplicate_nodes()
class IOTest(unittest.TestCase):
    """Smoke test for COLLADA export."""
    def test_dae(self):
        # Only checks that exporting to 'dae' does not raise.
        a = g.get_mesh('ballA.off')
        r = a.export(file_type='dae')
class ContainsTest(unittest.TestCase):
    """Test point-in-mesh queries against an analytic sphere."""
    def test_inside(self):
        sphere = g.trimesh.primitives.Sphere(radius=1.0, subdivisions=4)
        g.log.info('Testing contains function with sphere')
        samples = (np.random.random((1000, 3)) - .5) * 5
        radius = np.linalg.norm(samples, axis=1)
        # Margin skips points near the surface, where the discretized sphere
        # may legitimately disagree with the analytic radius.
        margin = .05
        truth_in = radius < (1.0 - margin)
        truth_out = radius > (1.0 + margin)
        contains = sphere.contains(samples)
        if not contains[truth_in].all():
            raise ValueError('contains test doesnt match truth!')
        if contains[truth_out].any():
            raise ValueError('contains test doesnt match truth!')
class MassTests(unittest.TestCase):
    """Compare mass_properties() against externally computed reference values."""
    def setUp(self):
        # inertia numbers pulled from solidworks
        self.truth = g.data['mass_properties']
        self.meshes = dict()
        for data in self.truth:
            filename = data['filename']
            self.meshes[filename] = g.get_mesh(filename)
    def test_mass(self):
        def check_parameter(a, b):
            # Elementwise |a - b| < TOL_CHECK for scalar or array parameters.
            check = np.all(
                np.less(np.abs(np.array(a) - np.array(b)), TOL_CHECK))
            return check
        for truth in self.truth:
            calculated = self.meshes[truth['filename']].mass_properties(density=truth[
                'density'])
            parameter_count = 0
            for parameter in calculated.keys():
                # Only compare parameters present in the reference data.
                if not (parameter in truth):
                    continue
                parameter_ok = check_parameter(
                    calculated[parameter], truth[parameter])
                if not parameter_ok:
                    log.error('Parameter %s failed on file %s!',
                              parameter, truth['filename'])
                self.assertTrue(parameter_ok)
                parameter_count += 1
            log.info('%i mass parameters confirmed for %s',
                     parameter_count, truth['filename'])
class SphericalTests(unittest.TestCase):
    """Round-trip test: vector -> spherical coordinates -> vector."""
    def test_spherical(self):
        v = g.trimesh.unitize(g.np.random.random((1000, 3)) - .5)
        spherical = g.trimesh.util.vector_to_spherical(v)
        v2 = g.trimesh.util.spherical_to_vector(spherical)
        self.assertTrue((np.abs(v - v2) < g.trimesh.constants.tol.merge).all())
class HemisphereTests(unittest.TestCase):
    """vector_hemisphere() must map v and -v to the same signed vector."""
    def test_hemisphere(self):
        v = trimesh.unitize(np.random.random((10000, 3)) - .5)
        # Include the axis vectors explicitly as boundary cases.
        v[0] = [0, 1, 0]
        v[1] = [1, 0, 0]
        v[2] = [0, 0, 1]
        # Interleave each vector with its negation: rows become (v, -v) pairs.
        v = np.column_stack((v, -v)).reshape((-1, 3))
        resigned = trimesh.util.vector_hemisphere(v)
        # After resigning, each (v, -v) pair must be identical (difference ~ 0).
        check = (abs(np.diff(resigned.reshape((-1, 2, 3)),
                             axis=1).sum(axis=2)) < trimesh.constants.tol.zero).all()
        self.assertTrue(check)
class FileTests(unittest.TestCase):
    """Tests for trimesh's stream-wrapping and file-hashing helpers."""
    def test_io_wrap(self):
        """wrap_as_stream() must round-trip both bytes and str payloads."""
        test_b = g.np.random.random(1).tostring()
        test_s = 'this is a test yo'
        res_b = g.trimesh.util.wrap_as_stream(test_b).read()
        res_s = g.trimesh.util.wrap_as_stream(test_s).read()
        self.assertTrue(res_b == test_b)
        self.assertTrue(res_s == test_s)
    def test_file_hash(self):
        """hash_file() must leave the stream position untouched and return a hash."""
        data = g.np.random.random(10).tostring()
        path = g.os.path.join(g.dir_data, 'nestable.json')
        for file_obj in [g.trimesh.util.wrap_as_stream(data),
                         open(path, 'rb')]:
            start = file_obj.tell()
            hashed = g.trimesh.util.hash_file(file_obj)
            # Hashing must not move the file cursor.
            self.assertTrue(file_obj.tell() == start)
            self.assertTrue(hashed is not None)
            self.assertTrue(len(hashed) > 5)
            file_obj.close()
if __name__ == '__main__':
    # Route trimesh's log output to the console, then run the whole suite.
    trimesh.util.attach_to_log()
    unittest.main()
| [
"[email protected]"
] | |
919ad93f8397a45a32157a3d9ce108dcda051ccb | 7769cb512623c8d3ba96c68556b2cea5547df5fd | /configs/retinanet_x101_64x4d_fpn_1x.py | c8be724f92d2a09198980ad017f4851b0be09359 | [
"MIT"
] | permissive | JialeCao001/D2Det | 0e49f4c76e539d574e46b02f278242ca912c31ea | a76781ab624a1304f9c15679852a73b4b6770950 | refs/heads/master | 2022-12-05T01:00:08.498629 | 2020-09-04T11:33:26 | 2020-09-04T11:33:26 | 270,723,372 | 312 | 88 | MIT | 2020-07-08T23:53:23 | 2020-06-08T15:37:35 | Python | UTF-8 | Python | false | false | 3,901 | py | # model settings
model = dict(
type='RetinaNet',
pretrained='open-mmlab://resnext101_64x4d',
backbone=dict(
type='ResNeXt',
depth=101,
groups=64,
base_width=4,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
start_level=1,
add_extra_convs=True,
num_outs=5),
bbox_head=dict(
type='RetinaHead',
num_classes=81,
in_channels=256,
stacked_convs=4,
feat_channels=256,
octave_base_scale=4,
scales_per_octave=3,
anchor_ratios=[0.5, 1.0, 2.0],
anchor_strides=[8, 16, 32, 64, 128],
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0],
loss_cls=dict(
type='FocalLoss',
use_sigmoid=True,
gamma=2.0,
alpha=0.25,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0)))
# training and testing settings
train_cfg = dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.4,
min_pos_iou=0,
ignore_iof_thr=-1),
allowed_border=-1,
pos_weight=-1,
debug=False)
test_cfg = dict(
nms_pre=1000,
min_bbox_size=0,
score_thr=0.05,
nms=dict(type='nms', iou_thr=0.5),
max_per_img=100)
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
imgs_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline))
evaluation = dict(interval=1, metric='bbox')
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 3,
step=[8, 11])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
# runtime settings
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/retinanet_x101_64x4d_fpn_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
| [
"[email protected]"
] | |
abb7a5ff2e147e6f3a2c6e5b5b06e12ddf6207c3 | 4a1273f72e7d8a07a3fa67ac9f2709b64ec6bc18 | /retiresmartz/tests/test_social_security.py | 79ee980a0f79030246707e0e54844ec9226eb916 | [] | no_license | WealthCity/django-project | 6668b92806d8c61ef9e20bd42daec99993cd25b2 | fa31fa82505c3d0fbc54bd8436cfc0e49c896f3e | refs/heads/dev | 2021-01-19T14:10:52.115301 | 2017-04-12T11:23:32 | 2017-04-12T11:23:32 | 88,132,284 | 0 | 1 | null | 2017-04-13T06:26:30 | 2017-04-13T06:26:29 | null | UTF-8 | Python | false | false | 370 | py | from datetime import date
from django.test import TestCase
from retiresmartz.calculator.social_security import calculate_payments
class SocialSecurityTests(TestCase):
    """Tests for retiresmartz.calculator.social_security.calculate_payments."""
    def test_calculate_payments(self):
        """Spot-check monthly benefit amounts for a fixed DOB and income."""
        amounts = calculate_payments(dob=date(1975, 1, 1), income=60000)
        # Expected figures keyed by claiming age; presumably derived from
        # SSA benefit tables -- confirm against the calculator's data source.
        self.assertEqual(amounts[67], 2055)
        self.assertEqual(amounts[68], 2219)
| [
"[email protected]"
] | |
bb0fd3227823ae168714b2d609f75a815db3c820 | b05761d771bb5a85d39d370c649567c1ff3eb089 | /venv/lib/python3.10/site-packages/jedi/third_party/typeshed/third_party/2and3/pynamodb/connection/__init__.pyi | f649861fa13ab72cf0f93c2e820af2c7a7f8dc10 | [] | no_license | JawshyJ/Coding_Practice | 88c49cab955eab04609ec1003b6b8c20f103fc06 | eb6b229d41aa49b1545af2120e6bee8e982adb41 | refs/heads/master | 2023-02-19T10:18:04.818542 | 2023-02-06T21:22:58 | 2023-02-06T21:22:58 | 247,788,631 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | pyi | /home/runner/.cache/pip/pool/b4/87/2e/b11aa30f971bc3d814440e95ea0252de4a2d77aa81c6e6be59eb9449f8 | [
"[email protected]"
] | |
4290f33117641c516843aeaf64025823ad951026 | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-5/f8052e4261238ff6c93465b3f0d0f22457f127ce-<container_run>-fix.py | d32a173f5a709bd873f8aaaa81b4fc29a4a7aeb0 | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,425 | py | def container_run(platform: str, nvidia_runtime: bool, docker_registry: str, shared_memory_size: str, local_ccache_dir: str, command: List[str], cleanup: Cleanup, dry_run: bool=False) -> int:
    """Run *command* inside the docker image for *platform* and return an exit code.

    Mounts the MXNet source tree, a build folder and a ccache directory into
    the container, streams the container's output to stdout, then stops and
    removes the container.  Returns the container's exit status, or one of the
    sentinel codes 150/151/152 when waiting/stopping/removing fails.  When
    *dry_run* is true, only logs the equivalent CLI invocation.
    """
    # Seconds to wait for the container to report its exit status.
    container_wait_s = 600
    # Environment passed into the container; ccache is redirected onto the
    # mounted /work/ccache volume so compilations are cached across runs.
    environment = {
        'CCACHE_MAXSIZE': '500G',
        'CCACHE_TEMPDIR': '/tmp/ccache',
        'CCACHE_DIR': '/work/ccache',
        'CCACHE_LOGFILE': '/tmp/ccache.log',
    }
    # Propagate Jenkins build identifiers (and any host CCACHE_MAXSIZE
    # override) into the container when present.
    jenkins_env_vars = ['BUILD_NUMBER', 'BUILD_ID', 'BUILD_TAG']
    environment.update({k: os.environ[k] for k in jenkins_env_vars if (k in os.environ)})
    environment.update({k: os.environ[k] for k in ['CCACHE_MAXSIZE'] if (k in os.environ)})
    tag = get_docker_tag(platform=platform, registry=docker_registry)
    mx_root = get_mxnet_root()
    local_build_folder = buildir()
    # Make sure the host-side mount points exist before docker mounts them.
    os.makedirs(local_build_folder, exist_ok=True)
    os.makedirs(local_ccache_dir, exist_ok=True)
    logging.info('Using ccache directory: %s', local_ccache_dir)
    docker_client = docker.from_env()
    # Equivalent CLI invocation, logged below for reproducibility only; the
    # actual run goes through the docker SDK call further down.
    docker_cmd_list = [get_docker_binary(nvidia_runtime), 'run', '--cap-add', 'SYS_PTRACE', '--rm', '--shm-size={}'.format(shared_memory_size), '-v', '{}:/work/mxnet'.format(mx_root), '-v', '{}:/work/build'.format(local_build_folder), '-v', '{}:/work/ccache'.format(local_ccache_dir), '-u', '{}:{}'.format(os.getuid(), os.getgid()), '-e', 'CCACHE_MAXSIZE={}'.format(environment['CCACHE_MAXSIZE']), '-e', 'CCACHE_TEMPDIR={}'.format(environment['CCACHE_TEMPDIR']), '-e', 'CCACHE_DIR={}'.format(environment['CCACHE_DIR']), '-e', 'CCACHE_LOGFILE={}'.format(environment['CCACHE_LOGFILE']), '-ti', tag]
    docker_cmd_list.extend(command)
    docker_cmd = ' \\\n\t'.join(docker_cmd_list)
    logging.info('Running %s in container %s', command, tag)
    logging.info('Executing the equivalent of:\n%s\n', docker_cmd)
    ret = 0
    if (not dry_run):
        # Block SIGINT/SIGTERM while the container is being created so the
        # cleanup bookkeeping below cannot be interrupted half-way through.
        signal.pthread_sigmask(signal.SIG_BLOCK, {signal.SIGINT, signal.SIGTERM})
        runtime = None
        if nvidia_runtime:
            runtime = 'nvidia'
        container = docker_client.containers.run(tag, runtime=runtime, detach=True, command=command, shm_size=shared_memory_size, user='{}:{}'.format(os.getuid(), os.getgid()), cap_add='SYS_PTRACE', volumes={
            mx_root: {
                'bind': '/work/mxnet',
                'mode': 'rw',
            },
            local_build_folder: {
                'bind': '/work/build',
                'mode': 'rw',
            },
            local_ccache_dir: {
                'bind': '/work/ccache',
                'mode': 'rw',
            },
        }, environment=environment)
        try:
            logging.info('Started container: %s', trim_container_id(container.id))
            # Register with the Cleanup handler BEFORE unblocking signals, so
            # the container is always known to it once signals can fire.
            cleanup.add_container(container)
            signal.pthread_sigmask(signal.SIG_UNBLOCK, {signal.SIGINT, signal.SIGTERM})
            # Stream container output to our stdout as it is produced.
            stream = container.logs(stream=True, stdout=True, stderr=True)
            sys.stdout.flush()
            for chunk in stream:
                sys.stdout.buffer.write(chunk)
                sys.stdout.buffer.flush()
            sys.stdout.flush()
            stream.close()
            try:
                logging.info('Waiting for status of container %s for %d s.', trim_container_id(container.id), container_wait_s)
                wait_result = container.wait(timeout=container_wait_s)
                logging.info('Container exit status: %s', wait_result)
                ret = wait_result.get('StatusCode', 200)
            except Exception as e:
                # Sentinel codes mark which stage failed: 150 wait, 151 stop, 152 remove.
                logging.exception(e)
                ret = 150
            try:
                logging.info('Stopping container: %s', trim_container_id(container.id))
                container.stop()
            except Exception as e:
                logging.exception(e)
                ret = 151
            try:
                logging.info('Removing container: %s', trim_container_id(container.id))
                container.remove()
            except Exception as e:
                logging.exception(e)
                ret = 152
            cleanup.remove_container(container)
            containers = docker_client.containers.list()
            if containers:
                logging.info('Other running containers: %s', [trim_container_id(x.id) for x in containers])
        except docker.errors.NotFound as e:
            # Someone else (e.g. the signal handler) already cleaned it up.
            logging.info('Container was stopped before cleanup started: %s', e)
    return ret
"[email protected]"
] | |
60ba9feb268c4d6bdb08de9c05f99d96d934f28e | 6b95f96e00e77f78f0919c10b2c90f116c0b295d | /TelstraTPN/models/body.py | 6402d1a63505555d93481a28e94f4ec6e6af57af | [] | no_license | telstra/Programmable-Network-SDK-python | 0522b54dcba48e16837c6c58b16dabde83b477d5 | d1c19c0383af53a5f09a6f5046da466ae6e1d97a | refs/heads/master | 2021-09-19T17:09:06.831233 | 2018-07-30T03:22:26 | 2018-07-30T03:22:26 | 113,531,312 | 3 | 1 | null | 2018-07-30T03:22:27 | 2017-12-08T04:23:15 | Python | UTF-8 | Python | false | false | 3,864 | py | # coding: utf-8
"""
Telstra Programmable Network API
Telstra Programmable Network is a self-provisioning platform that allows its users to create on-demand connectivity services between multiple end-points and add various network functions to those services. Programmable Network enables to connectivity to a global ecosystem of networking services as well as public and private cloud services. Once you are connected to the platform on one or more POPs (points of presence), you can start creating those services based on the use case that you want to accomplish. The Programmable Network API is available to all customers who have registered to use the Programmable Network. To register, please contact your account representative. # noqa: E501
OpenAPI spec version: 2.4.2
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class Body(object):
    """Request-payload model for the Telstra Programmable Network API.

    Auto-generated OpenAPI model that wraps a single ``renewal-option``
    field. Regenerate from the spec rather than editing by hand.
    """

    # attribute name -> declared OpenAPI type
    openapi_types = {
        'renewal_option': 'int'
    }

    # attribute name -> JSON key used on the wire
    attribute_map = {
        'renewal_option': 'renewal-option'
    }

    def __init__(self, renewal_option=None):  # noqa: E501
        """Body - a model defined in OpenAPI.

        :param renewal_option: optional int; assigned through the
            property setter only when a value is supplied.
        """
        self._renewal_option = None
        self.discriminator = None
        if renewal_option is not None:
            self.renewal_option = renewal_option

    @property
    def renewal_option(self):
        """Get the renewal option of this Body.

        \"Renewal Option: 0=Auto Disconnect, 1=Auto Renew, 2=Pay per hour\"

        :return: the stored renewal option, or None if unset
        :rtype: int
        """
        return self._renewal_option

    @renewal_option.setter
    def renewal_option(self, renewal_option):
        """Set the renewal option of this Body.

        \"Renewal Option: 0=Auto Disconnect, 1=Auto Renew, 2=Pay per hour\"

        :param renewal_option: the new renewal option value
        :type: int
        """
        self._renewal_option = renewal_option

    def to_dict(self):
        """Return the model's properties as a plain dict.

        One level of nesting is serialised: list elements and dict
        values that expose ``to_dict`` are converted; everything else
        is passed through unchanged.
        """
        serialised = {}
        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                serialised[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                serialised[attr] = value.to_dict()
            elif isinstance(value, dict):
                serialised[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                serialised[attr] = value
        return serialised

    def to_str(self):
        """Return a pretty-printed string form of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """Delegate to ``to_str`` for `print` and `pprint` output."""
        return self.to_str()

    def __eq__(self, other):
        """Two instances are equal iff both are Body with equal attributes."""
        return isinstance(other, Body) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of ``__eq__``."""
        return not self == other
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.