repo
stringlengths 7
55
| path
stringlengths 4
127
| func_name
stringlengths 1
88
| original_string
stringlengths 75
19.8k
| language
stringclasses 1
value | code
stringlengths 75
19.8k
| code_tokens
sequencelengths 20
707
| docstring
stringlengths 3
17.3k
| docstring_tokens
sequencelengths 3
222
| sha
stringlengths 40
40
| url
stringlengths 87
242
| partition
stringclasses 1
value | idx
int64 0
252k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.internal_link_sets | def internal_link_sets(self):
"""
Gets the InternalLinkSets API client.
Returns:
InternalLinkSets:
"""
if not self.__internal_link_sets:
self.__internal_link_sets = InternalLinkSets(self.__connection)
return self.__internal_link_sets | python | def internal_link_sets(self):
"""
Gets the InternalLinkSets API client.
Returns:
InternalLinkSets:
"""
if not self.__internal_link_sets:
self.__internal_link_sets = InternalLinkSets(self.__connection)
return self.__internal_link_sets | [
"def",
"internal_link_sets",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__internal_link_sets",
":",
"self",
".",
"__internal_link_sets",
"=",
"InternalLinkSets",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__internal_link_sets"
] | Gets the InternalLinkSets API client.
Returns:
InternalLinkSets: | [
"Gets",
"the",
"InternalLinkSets",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L693-L702 | train | 251,600 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.logical_interconnect_groups | def logical_interconnect_groups(self):
"""
Gets the LogicalInterconnectGroups API client.
Returns:
LogicalInterconnectGroups:
"""
if not self.__logical_interconnect_groups:
self.__logical_interconnect_groups = LogicalInterconnectGroups(
self.__connection)
return self.__logical_interconnect_groups | python | def logical_interconnect_groups(self):
"""
Gets the LogicalInterconnectGroups API client.
Returns:
LogicalInterconnectGroups:
"""
if not self.__logical_interconnect_groups:
self.__logical_interconnect_groups = LogicalInterconnectGroups(
self.__connection)
return self.__logical_interconnect_groups | [
"def",
"logical_interconnect_groups",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__logical_interconnect_groups",
":",
"self",
".",
"__logical_interconnect_groups",
"=",
"LogicalInterconnectGroups",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__logical_interconnect_groups"
] | Gets the LogicalInterconnectGroups API client.
Returns:
LogicalInterconnectGroups: | [
"Gets",
"the",
"LogicalInterconnectGroups",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L705-L715 | train | 251,601 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.sas_logical_interconnects | def sas_logical_interconnects(self):
"""
Gets the SasLogicalInterconnects API client.
Returns:
SasLogicalInterconnects:
"""
if not self.__sas_logical_interconnects:
self.__sas_logical_interconnects = SasLogicalInterconnects(self.__connection)
return self.__sas_logical_interconnects | python | def sas_logical_interconnects(self):
"""
Gets the SasLogicalInterconnects API client.
Returns:
SasLogicalInterconnects:
"""
if not self.__sas_logical_interconnects:
self.__sas_logical_interconnects = SasLogicalInterconnects(self.__connection)
return self.__sas_logical_interconnects | [
"def",
"sas_logical_interconnects",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__sas_logical_interconnects",
":",
"self",
".",
"__sas_logical_interconnects",
"=",
"SasLogicalInterconnects",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__sas_logical_interconnects"
] | Gets the SasLogicalInterconnects API client.
Returns:
SasLogicalInterconnects: | [
"Gets",
"the",
"SasLogicalInterconnects",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L728-L737 | train | 251,602 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.logical_downlinks | def logical_downlinks(self):
"""
Gets the LogicalDownlinks API client.
Returns:
LogicalDownlinks:
"""
if not self.__logical_downlinks:
self.__logical_downlinks = LogicalDownlinks(
self.__connection)
return self.__logical_downlinks | python | def logical_downlinks(self):
"""
Gets the LogicalDownlinks API client.
Returns:
LogicalDownlinks:
"""
if not self.__logical_downlinks:
self.__logical_downlinks = LogicalDownlinks(
self.__connection)
return self.__logical_downlinks | [
"def",
"logical_downlinks",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__logical_downlinks",
":",
"self",
".",
"__logical_downlinks",
"=",
"LogicalDownlinks",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__logical_downlinks"
] | Gets the LogicalDownlinks API client.
Returns:
LogicalDownlinks: | [
"Gets",
"the",
"LogicalDownlinks",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L740-L750 | train | 251,603 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.power_devices | def power_devices(self):
"""
Gets the PowerDevices API client.
Returns:
PowerDevices:
"""
if not self.__power_devices:
self.__power_devices = PowerDevices(self.__connection)
return self.__power_devices | python | def power_devices(self):
"""
Gets the PowerDevices API client.
Returns:
PowerDevices:
"""
if not self.__power_devices:
self.__power_devices = PowerDevices(self.__connection)
return self.__power_devices | [
"def",
"power_devices",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__power_devices",
":",
"self",
".",
"__power_devices",
"=",
"PowerDevices",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__power_devices"
] | Gets the PowerDevices API client.
Returns:
PowerDevices: | [
"Gets",
"the",
"PowerDevices",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L753-L762 | train | 251,604 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.unmanaged_devices | def unmanaged_devices(self):
"""
Gets the Unmanaged Devices API client.
Returns:
UnmanagedDevices:
"""
if not self.__unmanaged_devices:
self.__unmanaged_devices = UnmanagedDevices(self.__connection)
return self.__unmanaged_devices | python | def unmanaged_devices(self):
"""
Gets the Unmanaged Devices API client.
Returns:
UnmanagedDevices:
"""
if not self.__unmanaged_devices:
self.__unmanaged_devices = UnmanagedDevices(self.__connection)
return self.__unmanaged_devices | [
"def",
"unmanaged_devices",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__unmanaged_devices",
":",
"self",
".",
"__unmanaged_devices",
"=",
"UnmanagedDevices",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__unmanaged_devices"
] | Gets the Unmanaged Devices API client.
Returns:
UnmanagedDevices: | [
"Gets",
"the",
"Unmanaged",
"Devices",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L765-L774 | train | 251,605 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.racks | def racks(self):
"""
Gets the Racks API client.
Returns:
Racks:
"""
if not self.__racks:
self.__racks = Racks(self.__connection)
return self.__racks | python | def racks(self):
"""
Gets the Racks API client.
Returns:
Racks:
"""
if not self.__racks:
self.__racks = Racks(self.__connection)
return self.__racks | [
"def",
"racks",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__racks",
":",
"self",
".",
"__racks",
"=",
"Racks",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__racks"
] | Gets the Racks API client.
Returns:
Racks: | [
"Gets",
"the",
"Racks",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L777-L786 | train | 251,606 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.san_managers | def san_managers(self):
"""
Gets the SanManagers API client.
Returns:
SanManagers:
"""
if not self.__san_managers:
self.__san_managers = SanManagers(self.__connection)
return self.__san_managers | python | def san_managers(self):
"""
Gets the SanManagers API client.
Returns:
SanManagers:
"""
if not self.__san_managers:
self.__san_managers = SanManagers(self.__connection)
return self.__san_managers | [
"def",
"san_managers",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__san_managers",
":",
"self",
".",
"__san_managers",
"=",
"SanManagers",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__san_managers"
] | Gets the SanManagers API client.
Returns:
SanManagers: | [
"Gets",
"the",
"SanManagers",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L789-L798 | train | 251,607 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.endpoints | def endpoints(self):
"""
Gets the Endpoints API client.
Returns:
Endpoints:
"""
if not self.__endpoints:
self.__endpoints = Endpoints(self.__connection)
return self.__endpoints | python | def endpoints(self):
"""
Gets the Endpoints API client.
Returns:
Endpoints:
"""
if not self.__endpoints:
self.__endpoints = Endpoints(self.__connection)
return self.__endpoints | [
"def",
"endpoints",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__endpoints",
":",
"self",
".",
"__endpoints",
"=",
"Endpoints",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__endpoints"
] | Gets the Endpoints API client.
Returns:
Endpoints: | [
"Gets",
"the",
"Endpoints",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L801-L810 | train | 251,608 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.storage_systems | def storage_systems(self):
"""
Gets the StorageSystems API client.
Returns:
StorageSystems:
"""
if not self.__storage_systems:
self.__storage_systems = StorageSystems(self.__connection)
return self.__storage_systems | python | def storage_systems(self):
"""
Gets the StorageSystems API client.
Returns:
StorageSystems:
"""
if not self.__storage_systems:
self.__storage_systems = StorageSystems(self.__connection)
return self.__storage_systems | [
"def",
"storage_systems",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__storage_systems",
":",
"self",
".",
"__storage_systems",
"=",
"StorageSystems",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__storage_systems"
] | Gets the StorageSystems API client.
Returns:
StorageSystems: | [
"Gets",
"the",
"StorageSystems",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L833-L842 | train | 251,609 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.storage_pools | def storage_pools(self):
"""
Gets the StoragePools API client.
Returns:
StoragePools:
"""
if not self.__storage_pools:
self.__storage_pools = StoragePools(self.__connection)
return self.__storage_pools | python | def storage_pools(self):
"""
Gets the StoragePools API client.
Returns:
StoragePools:
"""
if not self.__storage_pools:
self.__storage_pools = StoragePools(self.__connection)
return self.__storage_pools | [
"def",
"storage_pools",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__storage_pools",
":",
"self",
".",
"__storage_pools",
"=",
"StoragePools",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__storage_pools"
] | Gets the StoragePools API client.
Returns:
StoragePools: | [
"Gets",
"the",
"StoragePools",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L845-L854 | train | 251,610 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.storage_volume_templates | def storage_volume_templates(self):
"""
Gets the StorageVolumeTemplates API client.
Returns:
StorageVolumeTemplates:
"""
if not self.__storage_volume_templates:
self.__storage_volume_templates = StorageVolumeTemplates(self.__connection)
return self.__storage_volume_templates | python | def storage_volume_templates(self):
"""
Gets the StorageVolumeTemplates API client.
Returns:
StorageVolumeTemplates:
"""
if not self.__storage_volume_templates:
self.__storage_volume_templates = StorageVolumeTemplates(self.__connection)
return self.__storage_volume_templates | [
"def",
"storage_volume_templates",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__storage_volume_templates",
":",
"self",
".",
"__storage_volume_templates",
"=",
"StorageVolumeTemplates",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__storage_volume_templates"
] | Gets the StorageVolumeTemplates API client.
Returns:
StorageVolumeTemplates: | [
"Gets",
"the",
"StorageVolumeTemplates",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L857-L866 | train | 251,611 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.storage_volume_attachments | def storage_volume_attachments(self):
"""
Gets the StorageVolumeAttachments API client.
Returns:
StorageVolumeAttachments:
"""
if not self.__storage_volume_attachments:
self.__storage_volume_attachments = StorageVolumeAttachments(self.__connection)
return self.__storage_volume_attachments | python | def storage_volume_attachments(self):
"""
Gets the StorageVolumeAttachments API client.
Returns:
StorageVolumeAttachments:
"""
if not self.__storage_volume_attachments:
self.__storage_volume_attachments = StorageVolumeAttachments(self.__connection)
return self.__storage_volume_attachments | [
"def",
"storage_volume_attachments",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__storage_volume_attachments",
":",
"self",
".",
"__storage_volume_attachments",
"=",
"StorageVolumeAttachments",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__storage_volume_attachments"
] | Gets the StorageVolumeAttachments API client.
Returns:
StorageVolumeAttachments: | [
"Gets",
"the",
"StorageVolumeAttachments",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L869-L878 | train | 251,612 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.firmware_drivers | def firmware_drivers(self):
"""
Gets the FirmwareDrivers API client.
Returns:
FirmwareDrivers:
"""
if not self.__firmware_drivers:
self.__firmware_drivers = FirmwareDrivers(self.__connection)
return self.__firmware_drivers | python | def firmware_drivers(self):
"""
Gets the FirmwareDrivers API client.
Returns:
FirmwareDrivers:
"""
if not self.__firmware_drivers:
self.__firmware_drivers = FirmwareDrivers(self.__connection)
return self.__firmware_drivers | [
"def",
"firmware_drivers",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__firmware_drivers",
":",
"self",
".",
"__firmware_drivers",
"=",
"FirmwareDrivers",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__firmware_drivers"
] | Gets the FirmwareDrivers API client.
Returns:
FirmwareDrivers: | [
"Gets",
"the",
"FirmwareDrivers",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L881-L890 | train | 251,613 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.firmware_bundles | def firmware_bundles(self):
"""
Gets the FirmwareBundles API client.
Returns:
FirmwareBundles:
"""
if not self.__firmware_bundles:
self.__firmware_bundles = FirmwareBundles(self.__connection)
return self.__firmware_bundles | python | def firmware_bundles(self):
"""
Gets the FirmwareBundles API client.
Returns:
FirmwareBundles:
"""
if not self.__firmware_bundles:
self.__firmware_bundles = FirmwareBundles(self.__connection)
return self.__firmware_bundles | [
"def",
"firmware_bundles",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__firmware_bundles",
":",
"self",
".",
"__firmware_bundles",
"=",
"FirmwareBundles",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__firmware_bundles"
] | Gets the FirmwareBundles API client.
Returns:
FirmwareBundles: | [
"Gets",
"the",
"FirmwareBundles",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L893-L902 | train | 251,614 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.volumes | def volumes(self):
"""
Gets the Volumes API client.
Returns:
Volumes:
"""
if not self.__volumes:
self.__volumes = Volumes(self.__connection)
return self.__volumes | python | def volumes(self):
"""
Gets the Volumes API client.
Returns:
Volumes:
"""
if not self.__volumes:
self.__volumes = Volumes(self.__connection)
return self.__volumes | [
"def",
"volumes",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__volumes",
":",
"self",
".",
"__volumes",
"=",
"Volumes",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__volumes"
] | Gets the Volumes API client.
Returns:
Volumes: | [
"Gets",
"the",
"Volumes",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L915-L924 | train | 251,615 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.sas_logical_jbod_attachments | def sas_logical_jbod_attachments(self):
"""
Gets the SAS Logical JBOD Attachments client.
Returns:
SasLogicalJbodAttachments:
"""
if not self.__sas_logical_jbod_attachments:
self.__sas_logical_jbod_attachments = SasLogicalJbodAttachments(self.__connection)
return self.__sas_logical_jbod_attachments | python | def sas_logical_jbod_attachments(self):
"""
Gets the SAS Logical JBOD Attachments client.
Returns:
SasLogicalJbodAttachments:
"""
if not self.__sas_logical_jbod_attachments:
self.__sas_logical_jbod_attachments = SasLogicalJbodAttachments(self.__connection)
return self.__sas_logical_jbod_attachments | [
"def",
"sas_logical_jbod_attachments",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__sas_logical_jbod_attachments",
":",
"self",
".",
"__sas_logical_jbod_attachments",
"=",
"SasLogicalJbodAttachments",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__sas_logical_jbod_attachments"
] | Gets the SAS Logical JBOD Attachments client.
Returns:
SasLogicalJbodAttachments: | [
"Gets",
"the",
"SAS",
"Logical",
"JBOD",
"Attachments",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L927-L936 | train | 251,616 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.managed_sans | def managed_sans(self):
"""
Gets the Managed SANs API client.
Returns:
ManagedSANs:
"""
if not self.__managed_sans:
self.__managed_sans = ManagedSANs(self.__connection)
return self.__managed_sans | python | def managed_sans(self):
"""
Gets the Managed SANs API client.
Returns:
ManagedSANs:
"""
if not self.__managed_sans:
self.__managed_sans = ManagedSANs(self.__connection)
return self.__managed_sans | [
"def",
"managed_sans",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__managed_sans",
":",
"self",
".",
"__managed_sans",
"=",
"ManagedSANs",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__managed_sans"
] | Gets the Managed SANs API client.
Returns:
ManagedSANs: | [
"Gets",
"the",
"Managed",
"SANs",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L939-L948 | train | 251,617 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.migratable_vc_domains | def migratable_vc_domains(self):
"""
Gets the VC Migration Manager API client.
Returns:
MigratableVcDomains:
"""
if not self.__migratable_vc_domains:
self.__migratable_vc_domains = MigratableVcDomains(self.__connection)
return self.__migratable_vc_domains | python | def migratable_vc_domains(self):
"""
Gets the VC Migration Manager API client.
Returns:
MigratableVcDomains:
"""
if not self.__migratable_vc_domains:
self.__migratable_vc_domains = MigratableVcDomains(self.__connection)
return self.__migratable_vc_domains | [
"def",
"migratable_vc_domains",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__migratable_vc_domains",
":",
"self",
".",
"__migratable_vc_domains",
"=",
"MigratableVcDomains",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__migratable_vc_domains"
] | Gets the VC Migration Manager API client.
Returns:
MigratableVcDomains: | [
"Gets",
"the",
"VC",
"Migration",
"Manager",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L951-L960 | train | 251,618 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.sas_interconnects | def sas_interconnects(self):
"""
Gets the SAS Interconnects API client.
Returns:
SasInterconnects:
"""
if not self.__sas_interconnects:
self.__sas_interconnects = SasInterconnects(self.__connection)
return self.__sas_interconnects | python | def sas_interconnects(self):
"""
Gets the SAS Interconnects API client.
Returns:
SasInterconnects:
"""
if not self.__sas_interconnects:
self.__sas_interconnects = SasInterconnects(self.__connection)
return self.__sas_interconnects | [
"def",
"sas_interconnects",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__sas_interconnects",
":",
"self",
".",
"__sas_interconnects",
"=",
"SasInterconnects",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__sas_interconnects"
] | Gets the SAS Interconnects API client.
Returns:
SasInterconnects: | [
"Gets",
"the",
"SAS",
"Interconnects",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L963-L972 | train | 251,619 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.sas_logical_interconnect_groups | def sas_logical_interconnect_groups(self):
"""
Gets the SasLogicalInterconnectGroups API client.
Returns:
SasLogicalInterconnectGroups:
"""
if not self.__sas_logical_interconnect_groups:
self.__sas_logical_interconnect_groups = SasLogicalInterconnectGroups(self.__connection)
return self.__sas_logical_interconnect_groups | python | def sas_logical_interconnect_groups(self):
"""
Gets the SasLogicalInterconnectGroups API client.
Returns:
SasLogicalInterconnectGroups:
"""
if not self.__sas_logical_interconnect_groups:
self.__sas_logical_interconnect_groups = SasLogicalInterconnectGroups(self.__connection)
return self.__sas_logical_interconnect_groups | [
"def",
"sas_logical_interconnect_groups",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__sas_logical_interconnect_groups",
":",
"self",
".",
"__sas_logical_interconnect_groups",
"=",
"SasLogicalInterconnectGroups",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__sas_logical_interconnect_groups"
] | Gets the SasLogicalInterconnectGroups API client.
Returns:
SasLogicalInterconnectGroups: | [
"Gets",
"the",
"SasLogicalInterconnectGroups",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L975-L984 | train | 251,620 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.drive_enclosures | def drive_enclosures(self):
"""
Gets the Drive Enclosures API client.
Returns:
DriveEnclosures:
"""
if not self.__drive_enclures:
self.__drive_enclures = DriveEnclosures(self.__connection)
return self.__drive_enclures | python | def drive_enclosures(self):
"""
Gets the Drive Enclosures API client.
Returns:
DriveEnclosures:
"""
if not self.__drive_enclures:
self.__drive_enclures = DriveEnclosures(self.__connection)
return self.__drive_enclures | [
"def",
"drive_enclosures",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__drive_enclures",
":",
"self",
".",
"__drive_enclures",
"=",
"DriveEnclosures",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__drive_enclures"
] | Gets the Drive Enclosures API client.
Returns:
DriveEnclosures: | [
"Gets",
"the",
"Drive",
"Enclosures",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L987-L996 | train | 251,621 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.sas_logical_jbods | def sas_logical_jbods(self):
"""
Gets the SAS Logical JBODs API client.
Returns:
SasLogicalJbod:
"""
if not self.__sas_logical_jbods:
self.__sas_logical_jbods = SasLogicalJbods(self.__connection)
return self.__sas_logical_jbods | python | def sas_logical_jbods(self):
"""
Gets the SAS Logical JBODs API client.
Returns:
SasLogicalJbod:
"""
if not self.__sas_logical_jbods:
self.__sas_logical_jbods = SasLogicalJbods(self.__connection)
return self.__sas_logical_jbods | [
"def",
"sas_logical_jbods",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__sas_logical_jbods",
":",
"self",
".",
"__sas_logical_jbods",
"=",
"SasLogicalJbods",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__sas_logical_jbods"
] | Gets the SAS Logical JBODs API client.
Returns:
SasLogicalJbod: | [
"Gets",
"the",
"SAS",
"Logical",
"JBODs",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L999-L1008 | train | 251,622 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.labels | def labels(self):
"""
Gets the Labels API client.
Returns:
Labels:
"""
if not self.__labels:
self.__labels = Labels(self.__connection)
return self.__labels | python | def labels(self):
"""
Gets the Labels API client.
Returns:
Labels:
"""
if not self.__labels:
self.__labels = Labels(self.__connection)
return self.__labels | [
"def",
"labels",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__labels",
":",
"self",
".",
"__labels",
"=",
"Labels",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__labels"
] | Gets the Labels API client.
Returns:
Labels: | [
"Gets",
"the",
"Labels",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1011-L1020 | train | 251,623 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.index_resources | def index_resources(self):
"""
Gets the Index Resources API client.
Returns:
IndexResources:
"""
if not self.__index_resources:
self.__index_resources = IndexResources(self.__connection)
return self.__index_resources | python | def index_resources(self):
"""
Gets the Index Resources API client.
Returns:
IndexResources:
"""
if not self.__index_resources:
self.__index_resources = IndexResources(self.__connection)
return self.__index_resources | [
"def",
"index_resources",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__index_resources",
":",
"self",
".",
"__index_resources",
"=",
"IndexResources",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__index_resources"
] | Gets the Index Resources API client.
Returns:
IndexResources: | [
"Gets",
"the",
"Index",
"Resources",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1023-L1032 | train | 251,624 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.alerts | def alerts(self):
"""
Gets the Alerts API client.
Returns:
Alerts:
"""
if not self.__alerts:
self.__alerts = Alerts(self.__connection)
return self.__alerts | python | def alerts(self):
"""
Gets the Alerts API client.
Returns:
Alerts:
"""
if not self.__alerts:
self.__alerts = Alerts(self.__connection)
return self.__alerts | [
"def",
"alerts",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__alerts",
":",
"self",
".",
"__alerts",
"=",
"Alerts",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__alerts"
] | Gets the Alerts API client.
Returns:
Alerts: | [
"Gets",
"the",
"Alerts",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1035-L1044 | train | 251,625 |
HewlettPackard/python-hpOneView | hpOneView/oneview_client.py | OneViewClient.events | def events(self):
"""
Gets the Events API client.
Returns:
Events:
"""
if not self.__events:
self.__events = Events(self.__connection)
return self.__events | python | def events(self):
"""
Gets the Events API client.
Returns:
Events:
"""
if not self.__events:
self.__events = Events(self.__connection)
return self.__events | [
"def",
"events",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"__events",
":",
"self",
".",
"__events",
"=",
"Events",
"(",
"self",
".",
"__connection",
")",
"return",
"self",
".",
"__events"
] | Gets the Events API client.
Returns:
Events: | [
"Gets",
"the",
"Events",
"API",
"client",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1047-L1056 | train | 251,626 |
def os_deployment_servers(self):
    """Return the Os Deployment Servers API client, creating it on first access.

    Returns:
        OsDeploymentServers: Client bound to the current connection.
    """
    client = self.__os_deployment_servers
    if not client:
        client = OsDeploymentServers(self.__connection)
        self.__os_deployment_servers = client
    return client
def certificate_rabbitmq(self):
    """Return the Certificate RabbitMQ API client, creating it on first access.

    Returns:
        CertificateRabbitMQ: Client bound to the current connection.
    """
    client = self.__certificate_rabbitmq
    if not client:
        client = CertificateRabbitMQ(self.__connection)
        self.__certificate_rabbitmq = client
    return client
def users(self):
    """Return the Users API client, creating it on first access.

    Returns:
        Users: Client bound to the current connection.
    """
    client = self.__users
    if not client:
        client = Users(self.__connection)
        self.__users = client
    return client
def appliance_device_read_community(self):
    """Return the ApplianceDeviceReadCommunity API client, creating it on first access.

    Returns:
        ApplianceDeviceReadCommunity: Client bound to the current connection.
    """
    client = self.__appliance_device_read_community
    if not client:
        client = ApplianceDeviceReadCommunity(self.__connection)
        self.__appliance_device_read_community = client
    return client
def appliance_device_snmp_v1_trap_destinations(self):
    """Return the ApplianceDeviceSNMPv1TrapDestinations API client, creating it on first access.

    Returns:
        ApplianceDeviceSNMPv1TrapDestinations: Client bound to the current connection.
    """
    client = self.__appliance_device_snmp_v1_trap_destinations
    if not client:
        client = ApplianceDeviceSNMPv1TrapDestinations(self.__connection)
        self.__appliance_device_snmp_v1_trap_destinations = client
    return client
def appliance_device_snmp_v3_trap_destinations(self):
    """Return the ApplianceDeviceSNMPv3TrapDestinations API client, creating it on first access.

    Returns:
        ApplianceDeviceSNMPv3TrapDestinations: Client bound to the current connection.
    """
    client = self.__appliance_device_snmp_v3_trap_destinations
    if not client:
        client = ApplianceDeviceSNMPv3TrapDestinations(self.__connection)
        self.__appliance_device_snmp_v3_trap_destinations = client
    return client
def appliance_device_snmp_v3_users(self):
    """Return the ApplianceDeviceSNMPv3Users API client, creating it on first access.

    Returns:
        ApplianceDeviceSNMPv3Users: Client bound to the current connection.
    """
    client = self.__appliance_device_snmp_v3_users
    if not client:
        client = ApplianceDeviceSNMPv3Users(self.__connection)
        self.__appliance_device_snmp_v3_users = client
    return client
def appliance_node_information(self):
    """Return the ApplianceNodeInformation API client, creating it on first access.

    Returns:
        ApplianceNodeInformation: Client bound to the current connection.
    """
    client = self.__appliance_node_information
    if not client:
        client = ApplianceNodeInformation(self.__connection)
        self.__appliance_node_information = client
    return client
def appliance_time_and_locale_configuration(self):
    """Return the ApplianceTimeAndLocaleConfiguration API client, creating it on first access.

    Returns:
        ApplianceTimeAndLocaleConfiguration: Client bound to the current connection.
    """
    client = self.__appliance_time_and_locale_configuration
    if not client:
        client = ApplianceTimeAndLocaleConfiguration(self.__connection)
        self.__appliance_time_and_locale_configuration = client
    return client
def versions(self):
    """Return the Version API client, creating it on first access.

    Returns:
        Version: Client bound to the current connection.
    """
    client = self.__versions
    if not client:
        client = Versions(self.__connection)
        self.__versions = client
    return client
def backups(self):
    """Return the Backup API client, creating it on first access.

    Returns:
        Backups: Client bound to the current connection.
    """
    client = self.__backups
    if not client:
        client = Backups(self.__connection)
        self.__backups = client
    return client
def login_details(self):
    """Return the LoginDetails API client, creating it on first access.

    Returns:
        LoginDetails: Client used to fetch the list of login details.
    """
    client = self.__login_details
    if not client:
        client = LoginDetails(self.__connection)
        self.__login_details = client
    return client
def get_available_networks(self, **kwargs):
    """Retrieve the networks available to a server profile template.

    Returns the Ethernet networks, Fibre Channel networks, and network sets
    that are available to a server profile template, along with their
    respective ports. For a new template, specify scopeUris,
    serverHardwareTypeUri and enclosureGroupUri; for an existing template,
    specify serverHardwareTypeUri, enclosureGroupUri and profileTemplateUri.
    scopeUris is ignored when profileTemplateUri is specified.

    Args:
        enclosureGroupUri: Enclosure group URI, required when the server
            hardware type identifies a blade server.
        profileTemplateUri: URI of an existing server profile template; the
            result only includes networks that share a scope with it.
        scopeUris: Expression restricting the resources returned according
            to the scopes they are assigned to.
        serverHardwareTypeUri: Server hardware type URI. For a rack server
            all applicable networks are listed; for a blade server the
            enclosureGroupUri must also be supplied, and empty bays that can
            support the hardware type are considered.
        view: FunctionType (Ethernet or FibreChannel) used to filter the
            list of networks returned.

    Returns:
        dict: Dictionary with available networks details.
    """
    # Falsy values (None, '') are dropped so they never reach the query string.
    pairs = ["{}={}".format(name, value) for name, value in kwargs.items() if value]
    uri = "{}/available-networks?{}".format(self.URI, "&".join(pairs))
    return self._helper.do_get(uri)
def get_all_without_ethernet(self, start=0, count=-1, filter='', sort=''):
    """List logical downlinks without ethernet, paginated.

    The collection is based on optional sorting and filtering and is
    constrained by the start and count parameters.

    Args:
        start: 0-based index of the first item to return. Defaults to 0,
            the first available item.
        count: Number of resources to return; -1 requests all items. The
            response may hold fewer items when start + count exceeds the
            total available.
        filter (list or str): General filter/query string narrowing the
            items returned; empty means no filtering.
        sort: Sort order of the returned data set; by default it is based
            on create time, oldest entry first.

    Returns:
        dict
    """
    # This endpoint has its own URI, so a dedicated client is built for it.
    no_ethernet_client = ResourceClient(
        self._connection, "/rest/logical-downlinks/withoutEthernet")
    return no_ethernet_client.get_all(start, count, filter=filter, sort=sort)
def get_without_ethernet(self, id_or_uri):
    """Get the logical downlink with the specified ID, without ethernet.

    Args:
        id_or_uri: Either the logical downlink ID or the logical downlink URI.

    Returns:
        dict
    """
    resource_uri = "{}/withoutEthernet".format(self._client.build_uri(id_or_uri))
    return self._client.get(resource_uri)
def update_firmware(self, firmware_information, force=False):
    """Install firmware on the member interconnects of a SAS Logical Interconnect.

    Args:
        firmware_information: Options describing the firmware to install.
        force: When True, the operation completes despite any problems with
            network connectivity or errors on the resource itself.

    Returns:
        dict: SAS Logical Interconnect Firmware.
    """
    target_uri = "{}/firmware".format(self.data["uri"])
    outcome = self._helper.update(firmware_information, target_uri, force=force)
    # Re-read the resource so the locally cached data reflects the change.
    self.refresh()
    return outcome
def get_firmware(self):
    """Get baseline firmware information for a SAS Logical Interconnect.

    Returns:
        dict: SAS Logical Interconnect Firmware.
    """
    return self._helper.do_get("{}/firmware".format(self.data["uri"]))
def update_compliance_all(self, information, timeout=-1):
    """Return SAS Logical Interconnects to a consistent state.

    The current SAS Logical Interconnect state is compared to the
    associated SAS Logical Interconnect group.

    Args:
        information: Can be either the resource ID or URI.
        timeout: Timeout in seconds. Waits for task completion by default;
            the timeout only stops the wait, it does not abort the
            operation in OneView.

    Returns:
        dict: SAS Logical Interconnect.
    """
    compliance_uri = "{}/compliance".format(self.URI)
    return self._helper.update(information, compliance_uri, timeout=timeout)
HewlettPackard/python-hpOneView | hpOneView/resources/networking/sas_logical_interconnects.py | SasLogicalInterconnects.replace_drive_enclosure | def replace_drive_enclosure(self, information):
"""
When a drive enclosure has been physically replaced, initiate the replacement operation that enables the
new drive enclosure to take over as a replacement for the prior drive enclosure. The request requires
specification of both the serial numbers of the original drive enclosure and its replacement to be provided.
Args:
information: Options to replace the drive enclosure.
Returns:
dict: SAS Logical Interconnect.
"""
uri = "{}/replaceDriveEnclosure".format(self.data["uri"])
result = self._helper.create(information, uri)
self.refresh()
return result | python | def replace_drive_enclosure(self, information):
"""
When a drive enclosure has been physically replaced, initiate the replacement operation that enables the
new drive enclosure to take over as a replacement for the prior drive enclosure. The request requires
specification of both the serial numbers of the original drive enclosure and its replacement to be provided.
Args:
information: Options to replace the drive enclosure.
Returns:
dict: SAS Logical Interconnect.
"""
uri = "{}/replaceDriveEnclosure".format(self.data["uri"])
result = self._helper.create(information, uri)
self.refresh()
return result | [
"def",
"replace_drive_enclosure",
"(",
"self",
",",
"information",
")",
":",
"uri",
"=",
"\"{}/replaceDriveEnclosure\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"\"uri\"",
"]",
")",
"result",
"=",
"self",
".",
"_helper",
".",
"create",
"(",
"information",
",",
"uri",
")",
"self",
".",
"refresh",
"(",
")",
"return",
"result"
] | When a drive enclosure has been physically replaced, initiate the replacement operation that enables the
new drive enclosure to take over as a replacement for the prior drive enclosure. The request requires
specification of both the serial numbers of the original drive enclosure and its replacement to be provided.
Args:
information: Options to replace the drive enclosure.
Returns:
dict: SAS Logical Interconnect. | [
"When",
"a",
"drive",
"enclosure",
"has",
"been",
"physically",
"replaced",
"initiate",
"the",
"replacement",
"operation",
"that",
"enables",
"the",
"new",
"drive",
"enclosure",
"to",
"take",
"over",
"as",
"a",
"replacement",
"for",
"the",
"prior",
"drive",
"enclosure",
".",
"The",
"request",
"requires",
"specification",
"of",
"both",
"the",
"serial",
"numbers",
"of",
"the",
"original",
"drive",
"enclosure",
"and",
"its",
"replacement",
"to",
"be",
"provided",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/networking/sas_logical_interconnects.py#L149-L166 | train | 251,645 |
HewlettPackard/python-hpOneView | hpOneView/resources/networking/sas_logical_interconnects.py | SasLogicalInterconnects.update_configuration | def update_configuration(self):
"""
Asynchronously applies or re-applies the SAS Logical Interconnect configuration to all managed interconnects
of a SAS Logical Interconnect.
Returns:
dict: SAS Logical Interconnect.
"""
uri = "{}/configuration".format(self.data["uri"])
result = self._helper.update({}, uri)
self.refresh()
return result | python | def update_configuration(self):
"""
Asynchronously applies or re-applies the SAS Logical Interconnect configuration to all managed interconnects
of a SAS Logical Interconnect.
Returns:
dict: SAS Logical Interconnect.
"""
uri = "{}/configuration".format(self.data["uri"])
result = self._helper.update({}, uri)
self.refresh()
return result | [
"def",
"update_configuration",
"(",
"self",
")",
":",
"uri",
"=",
"\"{}/configuration\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"\"uri\"",
"]",
")",
"result",
"=",
"self",
".",
"_helper",
".",
"update",
"(",
"{",
"}",
",",
"uri",
")",
"self",
".",
"refresh",
"(",
")",
"return",
"result"
] | Asynchronously applies or re-applies the SAS Logical Interconnect configuration to all managed interconnects
of a SAS Logical Interconnect.
Returns:
dict: SAS Logical Interconnect. | [
"Asynchronously",
"applies",
"or",
"re",
"-",
"applies",
"the",
"SAS",
"Logical",
"Interconnect",
"configuration",
"to",
"all",
"managed",
"interconnects",
"of",
"a",
"SAS",
"Logical",
"Interconnect",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/networking/sas_logical_interconnects.py#L169-L181 | train | 251,646 |
vals/umis | umis/barcodes.py | mutationhash | def mutationhash(strings, nedit):
"""
produce a hash with each key a nedit distance substitution for a set of
strings. values of the hash is the set of strings the substitution could
have come from
"""
maxlen = max([len(string) for string in strings])
indexes = generate_idx(maxlen, nedit)
muthash = defaultdict(set)
for string in strings:
muthash[string].update([string])
for x in substitution_set(string, indexes):
muthash[x].update([string])
return muthash | python | def mutationhash(strings, nedit):
"""
produce a hash with each key a nedit distance substitution for a set of
strings. values of the hash is the set of strings the substitution could
have come from
"""
maxlen = max([len(string) for string in strings])
indexes = generate_idx(maxlen, nedit)
muthash = defaultdict(set)
for string in strings:
muthash[string].update([string])
for x in substitution_set(string, indexes):
muthash[x].update([string])
return muthash | [
"def",
"mutationhash",
"(",
"strings",
",",
"nedit",
")",
":",
"maxlen",
"=",
"max",
"(",
"[",
"len",
"(",
"string",
")",
"for",
"string",
"in",
"strings",
"]",
")",
"indexes",
"=",
"generate_idx",
"(",
"maxlen",
",",
"nedit",
")",
"muthash",
"=",
"defaultdict",
"(",
"set",
")",
"for",
"string",
"in",
"strings",
":",
"muthash",
"[",
"string",
"]",
".",
"update",
"(",
"[",
"string",
"]",
")",
"for",
"x",
"in",
"substitution_set",
"(",
"string",
",",
"indexes",
")",
":",
"muthash",
"[",
"x",
"]",
".",
"update",
"(",
"[",
"string",
"]",
")",
"return",
"muthash"
] | produce a hash with each key a nedit distance substitution for a set of
strings. values of the hash is the set of strings the substitution could
have come from | [
"produce",
"a",
"hash",
"with",
"each",
"key",
"a",
"nedit",
"distance",
"substitution",
"for",
"a",
"set",
"of",
"strings",
".",
"values",
"of",
"the",
"hash",
"is",
"the",
"set",
"of",
"strings",
"the",
"substitution",
"could",
"have",
"come",
"from"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/barcodes.py#L150-L163 | train | 251,647 |
vals/umis | umis/barcodes.py | substitution_set | def substitution_set(string, indexes):
"""
for a string, return a set of all possible substitutions
"""
strlen = len(string)
return {mutate_string(string, x) for x in indexes if valid_substitution(strlen, x)} | python | def substitution_set(string, indexes):
"""
for a string, return a set of all possible substitutions
"""
strlen = len(string)
return {mutate_string(string, x) for x in indexes if valid_substitution(strlen, x)} | [
"def",
"substitution_set",
"(",
"string",
",",
"indexes",
")",
":",
"strlen",
"=",
"len",
"(",
"string",
")",
"return",
"{",
"mutate_string",
"(",
"string",
",",
"x",
")",
"for",
"x",
"in",
"indexes",
"if",
"valid_substitution",
"(",
"strlen",
",",
"x",
")",
"}"
] | for a string, return a set of all possible substitutions | [
"for",
"a",
"string",
"return",
"a",
"set",
"of",
"all",
"possible",
"substitutions"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/barcodes.py#L165-L170 | train | 251,648 |
vals/umis | umis/barcodes.py | valid_substitution | def valid_substitution(strlen, index):
"""
skip performing substitutions that are outside the bounds of the string
"""
values = index[0]
return all([strlen > i for i in values]) | python | def valid_substitution(strlen, index):
"""
skip performing substitutions that are outside the bounds of the string
"""
values = index[0]
return all([strlen > i for i in values]) | [
"def",
"valid_substitution",
"(",
"strlen",
",",
"index",
")",
":",
"values",
"=",
"index",
"[",
"0",
"]",
"return",
"all",
"(",
"[",
"strlen",
">",
"i",
"for",
"i",
"in",
"values",
"]",
")"
] | skip performing substitutions that are outside the bounds of the string | [
"skip",
"performing",
"substitutions",
"that",
"are",
"outside",
"the",
"bounds",
"of",
"the",
"string"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/barcodes.py#L172-L177 | train | 251,649 |
vals/umis | umis/barcodes.py | acgt_match | def acgt_match(string):
"""
returns True if sting consist of only "A "C" "G" "T"
"""
search = re.compile(r'[^ACGT]').search
return not bool(search(string)) | python | def acgt_match(string):
"""
returns True if sting consist of only "A "C" "G" "T"
"""
search = re.compile(r'[^ACGT]').search
return not bool(search(string)) | [
"def",
"acgt_match",
"(",
"string",
")",
":",
"search",
"=",
"re",
".",
"compile",
"(",
"r'[^ACGT]'",
")",
".",
"search",
"return",
"not",
"bool",
"(",
"search",
"(",
"string",
")",
")"
] | returns True if sting consist of only "A "C" "G" "T" | [
"returns",
"True",
"if",
"sting",
"consist",
"of",
"only",
"A",
"C",
"G",
"T"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/barcodes.py#L194-L199 | train | 251,650 |
vals/umis | umis/umis.py | stream_fastq | def stream_fastq(file_handler):
''' Generator which gives all four lines if a fastq read as one string
'''
next_element = ''
for i, line in enumerate(file_handler):
next_element += line
if i % 4 == 3:
yield next_element
next_element = '' | python | def stream_fastq(file_handler):
''' Generator which gives all four lines if a fastq read as one string
'''
next_element = ''
for i, line in enumerate(file_handler):
next_element += line
if i % 4 == 3:
yield next_element
next_element = '' | [
"def",
"stream_fastq",
"(",
"file_handler",
")",
":",
"next_element",
"=",
"''",
"for",
"i",
",",
"line",
"in",
"enumerate",
"(",
"file_handler",
")",
":",
"next_element",
"+=",
"line",
"if",
"i",
"%",
"4",
"==",
"3",
":",
"yield",
"next_element",
"next_element",
"=",
"''"
] | Generator which gives all four lines if a fastq read as one string | [
"Generator",
"which",
"gives",
"all",
"four",
"lines",
"if",
"a",
"fastq",
"read",
"as",
"one",
"string"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L59-L67 | train | 251,651 |
vals/umis | umis/umis.py | read_fastq | def read_fastq(filename):
"""
return a stream of FASTQ entries, handling gzipped and empty files
"""
if not filename:
return itertools.cycle((None,))
if filename == "-":
filename_fh = sys.stdin
elif filename.endswith('gz'):
if is_python3:
filename_fh = gzip.open(filename, mode='rt')
else:
filename_fh = BufferedReader(gzip.open(filename, mode='rt'))
else:
filename_fh = open(filename)
return stream_fastq(filename_fh) | python | def read_fastq(filename):
"""
return a stream of FASTQ entries, handling gzipped and empty files
"""
if not filename:
return itertools.cycle((None,))
if filename == "-":
filename_fh = sys.stdin
elif filename.endswith('gz'):
if is_python3:
filename_fh = gzip.open(filename, mode='rt')
else:
filename_fh = BufferedReader(gzip.open(filename, mode='rt'))
else:
filename_fh = open(filename)
return stream_fastq(filename_fh) | [
"def",
"read_fastq",
"(",
"filename",
")",
":",
"if",
"not",
"filename",
":",
"return",
"itertools",
".",
"cycle",
"(",
"(",
"None",
",",
")",
")",
"if",
"filename",
"==",
"\"-\"",
":",
"filename_fh",
"=",
"sys",
".",
"stdin",
"elif",
"filename",
".",
"endswith",
"(",
"'gz'",
")",
":",
"if",
"is_python3",
":",
"filename_fh",
"=",
"gzip",
".",
"open",
"(",
"filename",
",",
"mode",
"=",
"'rt'",
")",
"else",
":",
"filename_fh",
"=",
"BufferedReader",
"(",
"gzip",
".",
"open",
"(",
"filename",
",",
"mode",
"=",
"'rt'",
")",
")",
"else",
":",
"filename_fh",
"=",
"open",
"(",
"filename",
")",
"return",
"stream_fastq",
"(",
"filename_fh",
")"
] | return a stream of FASTQ entries, handling gzipped and empty files | [
"return",
"a",
"stream",
"of",
"FASTQ",
"entries",
"handling",
"gzipped",
"and",
"empty",
"files"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L69-L84 | train | 251,652 |
vals/umis | umis/umis.py | write_fastq | def write_fastq(filename):
"""
return a handle for FASTQ writing, handling gzipped files
"""
if filename:
if filename.endswith('gz'):
filename_fh = gzip.open(filename, mode='wb')
else:
filename_fh = open(filename, mode='w')
else:
filename_fh = None
return filename_fh | python | def write_fastq(filename):
"""
return a handle for FASTQ writing, handling gzipped files
"""
if filename:
if filename.endswith('gz'):
filename_fh = gzip.open(filename, mode='wb')
else:
filename_fh = open(filename, mode='w')
else:
filename_fh = None
return filename_fh | [
"def",
"write_fastq",
"(",
"filename",
")",
":",
"if",
"filename",
":",
"if",
"filename",
".",
"endswith",
"(",
"'gz'",
")",
":",
"filename_fh",
"=",
"gzip",
".",
"open",
"(",
"filename",
",",
"mode",
"=",
"'wb'",
")",
"else",
":",
"filename_fh",
"=",
"open",
"(",
"filename",
",",
"mode",
"=",
"'w'",
")",
"else",
":",
"filename_fh",
"=",
"None",
"return",
"filename_fh"
] | return a handle for FASTQ writing, handling gzipped files | [
"return",
"a",
"handle",
"for",
"FASTQ",
"writing",
"handling",
"gzipped",
"files"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L96-L107 | train | 251,653 |
vals/umis | umis/umis.py | detect_alignment_annotations | def detect_alignment_annotations(queryalignment, tags=False):
"""
detects the annotations present in a SAM file, inspecting either the
tags or the query names and returns a set of annotations present
"""
annotations = set()
for k, v in BARCODEINFO.items():
if tags:
if queryalignment.has_tag(v.bamtag):
annotations.add(k)
else:
if v.readprefix in queryalignment.qname:
annotations.add(k)
return annotations | python | def detect_alignment_annotations(queryalignment, tags=False):
"""
detects the annotations present in a SAM file, inspecting either the
tags or the query names and returns a set of annotations present
"""
annotations = set()
for k, v in BARCODEINFO.items():
if tags:
if queryalignment.has_tag(v.bamtag):
annotations.add(k)
else:
if v.readprefix in queryalignment.qname:
annotations.add(k)
return annotations | [
"def",
"detect_alignment_annotations",
"(",
"queryalignment",
",",
"tags",
"=",
"False",
")",
":",
"annotations",
"=",
"set",
"(",
")",
"for",
"k",
",",
"v",
"in",
"BARCODEINFO",
".",
"items",
"(",
")",
":",
"if",
"tags",
":",
"if",
"queryalignment",
".",
"has_tag",
"(",
"v",
".",
"bamtag",
")",
":",
"annotations",
".",
"add",
"(",
"k",
")",
"else",
":",
"if",
"v",
".",
"readprefix",
"in",
"queryalignment",
".",
"qname",
":",
"annotations",
".",
"add",
"(",
"k",
")",
"return",
"annotations"
] | detects the annotations present in a SAM file, inspecting either the
tags or the query names and returns a set of annotations present | [
"detects",
"the",
"annotations",
"present",
"in",
"a",
"SAM",
"file",
"inspecting",
"either",
"the",
"tags",
"or",
"the",
"query",
"names",
"and",
"returns",
"a",
"set",
"of",
"annotations",
"present"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L122-L135 | train | 251,654 |
vals/umis | umis/umis.py | detect_fastq_annotations | def detect_fastq_annotations(fastq_file):
"""
detects annotations preesent in a FASTQ file by examining the first read
"""
annotations = set()
queryread = tz.first(read_fastq(fastq_file))
for k, v in BARCODEINFO.items():
if v.readprefix in queryread:
annotations.add(k)
return annotations | python | def detect_fastq_annotations(fastq_file):
"""
detects annotations preesent in a FASTQ file by examining the first read
"""
annotations = set()
queryread = tz.first(read_fastq(fastq_file))
for k, v in BARCODEINFO.items():
if v.readprefix in queryread:
annotations.add(k)
return annotations | [
"def",
"detect_fastq_annotations",
"(",
"fastq_file",
")",
":",
"annotations",
"=",
"set",
"(",
")",
"queryread",
"=",
"tz",
".",
"first",
"(",
"read_fastq",
"(",
"fastq_file",
")",
")",
"for",
"k",
",",
"v",
"in",
"BARCODEINFO",
".",
"items",
"(",
")",
":",
"if",
"v",
".",
"readprefix",
"in",
"queryread",
":",
"annotations",
".",
"add",
"(",
"k",
")",
"return",
"annotations"
] | detects annotations preesent in a FASTQ file by examining the first read | [
"detects",
"annotations",
"preesent",
"in",
"a",
"FASTQ",
"file",
"by",
"examining",
"the",
"first",
"read"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L137-L146 | train | 251,655 |
vals/umis | umis/umis.py | construct_transformed_regex | def construct_transformed_regex(annotations):
"""
construct a regex that matches possible fields in a transformed file
annotations is a set of which keys in BARCODEINFO are present in the file
"""
re_string = '.*'
if "cellular" in annotations:
re_string += ":CELL_(?P<CB>.*)"
if "molecular" in annotations:
re_string += ":UMI_(?P<MB>\w*)"
if "sample" in annotations:
re_string += ":SAMPLE_(?P<SB>\w*)"
if re_string == ".*":
logger.error("No annotation present on this file, aborting.")
sys.exit(1)
return re_string | python | def construct_transformed_regex(annotations):
"""
construct a regex that matches possible fields in a transformed file
annotations is a set of which keys in BARCODEINFO are present in the file
"""
re_string = '.*'
if "cellular" in annotations:
re_string += ":CELL_(?P<CB>.*)"
if "molecular" in annotations:
re_string += ":UMI_(?P<MB>\w*)"
if "sample" in annotations:
re_string += ":SAMPLE_(?P<SB>\w*)"
if re_string == ".*":
logger.error("No annotation present on this file, aborting.")
sys.exit(1)
return re_string | [
"def",
"construct_transformed_regex",
"(",
"annotations",
")",
":",
"re_string",
"=",
"'.*'",
"if",
"\"cellular\"",
"in",
"annotations",
":",
"re_string",
"+=",
"\":CELL_(?P<CB>.*)\"",
"if",
"\"molecular\"",
"in",
"annotations",
":",
"re_string",
"+=",
"\":UMI_(?P<MB>\\w*)\"",
"if",
"\"sample\"",
"in",
"annotations",
":",
"re_string",
"+=",
"\":SAMPLE_(?P<SB>\\w*)\"",
"if",
"re_string",
"==",
"\".*\"",
":",
"logger",
".",
"error",
"(",
"\"No annotation present on this file, aborting.\"",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"return",
"re_string"
] | construct a regex that matches possible fields in a transformed file
annotations is a set of which keys in BARCODEINFO are present in the file | [
"construct",
"a",
"regex",
"that",
"matches",
"possible",
"fields",
"in",
"a",
"transformed",
"file",
"annotations",
"is",
"a",
"set",
"of",
"which",
"keys",
"in",
"BARCODEINFO",
"are",
"present",
"in",
"the",
"file"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L148-L163 | train | 251,656 |
vals/umis | umis/umis.py | _infer_transform_options | def _infer_transform_options(transform):
"""
figure out what transform options should be by examining the provided
regexes for keywords
"""
TransformOptions = collections.namedtuple("TransformOptions",
['CB', 'dual_index', 'triple_index', 'MB', 'SB'])
CB = False
SB = False
MB = False
dual_index = False
triple_index = False
for rx in transform.values():
if not rx:
continue
if "CB1" in rx:
if "CB3" in rx:
triple_index = True
else:
dual_index = True
if "SB" in rx:
SB = True
if "CB" in rx:
CB = True
if "MB" in rx:
MB = True
return TransformOptions(CB=CB, dual_index=dual_index, triple_index=triple_index, MB=MB, SB=SB) | python | def _infer_transform_options(transform):
"""
figure out what transform options should be by examining the provided
regexes for keywords
"""
TransformOptions = collections.namedtuple("TransformOptions",
['CB', 'dual_index', 'triple_index', 'MB', 'SB'])
CB = False
SB = False
MB = False
dual_index = False
triple_index = False
for rx in transform.values():
if not rx:
continue
if "CB1" in rx:
if "CB3" in rx:
triple_index = True
else:
dual_index = True
if "SB" in rx:
SB = True
if "CB" in rx:
CB = True
if "MB" in rx:
MB = True
return TransformOptions(CB=CB, dual_index=dual_index, triple_index=triple_index, MB=MB, SB=SB) | [
"def",
"_infer_transform_options",
"(",
"transform",
")",
":",
"TransformOptions",
"=",
"collections",
".",
"namedtuple",
"(",
"\"TransformOptions\"",
",",
"[",
"'CB'",
",",
"'dual_index'",
",",
"'triple_index'",
",",
"'MB'",
",",
"'SB'",
"]",
")",
"CB",
"=",
"False",
"SB",
"=",
"False",
"MB",
"=",
"False",
"dual_index",
"=",
"False",
"triple_index",
"=",
"False",
"for",
"rx",
"in",
"transform",
".",
"values",
"(",
")",
":",
"if",
"not",
"rx",
":",
"continue",
"if",
"\"CB1\"",
"in",
"rx",
":",
"if",
"\"CB3\"",
"in",
"rx",
":",
"triple_index",
"=",
"True",
"else",
":",
"dual_index",
"=",
"True",
"if",
"\"SB\"",
"in",
"rx",
":",
"SB",
"=",
"True",
"if",
"\"CB\"",
"in",
"rx",
":",
"CB",
"=",
"True",
"if",
"\"MB\"",
"in",
"rx",
":",
"MB",
"=",
"True",
"return",
"TransformOptions",
"(",
"CB",
"=",
"CB",
",",
"dual_index",
"=",
"dual_index",
",",
"triple_index",
"=",
"triple_index",
",",
"MB",
"=",
"MB",
",",
"SB",
"=",
"SB",
")"
] | figure out what transform options should be by examining the provided
regexes for keywords | [
"figure",
"out",
"what",
"transform",
"options",
"should",
"be",
"by",
"examining",
"the",
"provided",
"regexes",
"for",
"keywords"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L306-L332 | train | 251,657 |
vals/umis | umis/umis.py | _extract_readnum | def _extract_readnum(read_dict):
"""Extract read numbers from old-style fastqs.
Handles read 1 and 2 specifications where naming is
readname/1 readname/2
"""
pat = re.compile(r"(?P<readnum>/\d+)$")
parts = pat.split(read_dict["name"])
if len(parts) == 3:
name, readnum, endofline = parts
read_dict["name"] = name
read_dict["readnum"] = readnum
else:
read_dict["readnum"] = ""
return read_dict | python | def _extract_readnum(read_dict):
"""Extract read numbers from old-style fastqs.
Handles read 1 and 2 specifications where naming is
readname/1 readname/2
"""
pat = re.compile(r"(?P<readnum>/\d+)$")
parts = pat.split(read_dict["name"])
if len(parts) == 3:
name, readnum, endofline = parts
read_dict["name"] = name
read_dict["readnum"] = readnum
else:
read_dict["readnum"] = ""
return read_dict | [
"def",
"_extract_readnum",
"(",
"read_dict",
")",
":",
"pat",
"=",
"re",
".",
"compile",
"(",
"r\"(?P<readnum>/\\d+)$\"",
")",
"parts",
"=",
"pat",
".",
"split",
"(",
"read_dict",
"[",
"\"name\"",
"]",
")",
"if",
"len",
"(",
"parts",
")",
"==",
"3",
":",
"name",
",",
"readnum",
",",
"endofline",
"=",
"parts",
"read_dict",
"[",
"\"name\"",
"]",
"=",
"name",
"read_dict",
"[",
"\"readnum\"",
"]",
"=",
"readnum",
"else",
":",
"read_dict",
"[",
"\"readnum\"",
"]",
"=",
"\"\"",
"return",
"read_dict"
] | Extract read numbers from old-style fastqs.
Handles read 1 and 2 specifications where naming is
readname/1 readname/2 | [
"Extract",
"read",
"numbers",
"from",
"old",
"-",
"style",
"fastqs",
"."
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L334-L348 | train | 251,658 |
vals/umis | umis/umis.py | sparse | def sparse(csv, sparse):
''' Convert a CSV file to a sparse matrix with rows and column names
saved as companion files.
'''
import pandas as pd
df = pd.read_csv(csv, index_col=0, header=0)
pd.Series(df.index).to_csv(sparse + ".rownames", index=False)
pd.Series(df.columns.values).to_csv(sparse + ".colnames", index=False)
with open(sparse, "w+b") as out_handle:
scipy.io.mmwrite(out_handle, scipy.sparse.csr_matrix(df)) | python | def sparse(csv, sparse):
''' Convert a CSV file to a sparse matrix with rows and column names
saved as companion files.
'''
import pandas as pd
df = pd.read_csv(csv, index_col=0, header=0)
pd.Series(df.index).to_csv(sparse + ".rownames", index=False)
pd.Series(df.columns.values).to_csv(sparse + ".colnames", index=False)
with open(sparse, "w+b") as out_handle:
scipy.io.mmwrite(out_handle, scipy.sparse.csr_matrix(df)) | [
"def",
"sparse",
"(",
"csv",
",",
"sparse",
")",
":",
"import",
"pandas",
"as",
"pd",
"df",
"=",
"pd",
".",
"read_csv",
"(",
"csv",
",",
"index_col",
"=",
"0",
",",
"header",
"=",
"0",
")",
"pd",
".",
"Series",
"(",
"df",
".",
"index",
")",
".",
"to_csv",
"(",
"sparse",
"+",
"\".rownames\"",
",",
"index",
"=",
"False",
")",
"pd",
".",
"Series",
"(",
"df",
".",
"columns",
".",
"values",
")",
".",
"to_csv",
"(",
"sparse",
"+",
"\".colnames\"",
",",
"index",
"=",
"False",
")",
"with",
"open",
"(",
"sparse",
",",
"\"w+b\"",
")",
"as",
"out_handle",
":",
"scipy",
".",
"io",
".",
"mmwrite",
"(",
"out_handle",
",",
"scipy",
".",
"sparse",
".",
"csr_matrix",
"(",
"df",
")",
")"
] | Convert a CSV file to a sparse matrix with rows and column names
saved as companion files. | [
"Convert",
"a",
"CSV",
"file",
"to",
"a",
"sparse",
"matrix",
"with",
"rows",
"and",
"column",
"names",
"saved",
"as",
"companion",
"files",
"."
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L943-L952 | train | 251,659 |
vals/umis | umis/umis.py | cb_histogram | def cb_histogram(fastq, umi_histogram):
''' Counts the number of reads for each cellular barcode
Expects formatted fastq files.
'''
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
parser_re = re.compile(re_string)
cb_counter = collections.Counter()
umi_counter = collections.Counter()
for read in read_fastq(fastq):
match = parser_re.search(read).groupdict()
cb = match['CB']
cb_counter[cb] += 1
if umi_histogram:
umi = match['MB']
umi_counter[(cb, umi)] += 1
for bc, count in cb_counter.most_common():
sys.stdout.write('{}\t{}\n'.format(bc, count))
if umi_histogram:
with open(umi_histogram, "w") as umi_handle:
for cbumi, count in umi_counter.most_common():
umi_handle.write('{}\t{}\t{}\n'.format(cbumi[0], cbumi[1], count)) | python | def cb_histogram(fastq, umi_histogram):
''' Counts the number of reads for each cellular barcode
Expects formatted fastq files.
'''
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
parser_re = re.compile(re_string)
cb_counter = collections.Counter()
umi_counter = collections.Counter()
for read in read_fastq(fastq):
match = parser_re.search(read).groupdict()
cb = match['CB']
cb_counter[cb] += 1
if umi_histogram:
umi = match['MB']
umi_counter[(cb, umi)] += 1
for bc, count in cb_counter.most_common():
sys.stdout.write('{}\t{}\n'.format(bc, count))
if umi_histogram:
with open(umi_histogram, "w") as umi_handle:
for cbumi, count in umi_counter.most_common():
umi_handle.write('{}\t{}\t{}\n'.format(cbumi[0], cbumi[1], count)) | [
"def",
"cb_histogram",
"(",
"fastq",
",",
"umi_histogram",
")",
":",
"annotations",
"=",
"detect_fastq_annotations",
"(",
"fastq",
")",
"re_string",
"=",
"construct_transformed_regex",
"(",
"annotations",
")",
"parser_re",
"=",
"re",
".",
"compile",
"(",
"re_string",
")",
"cb_counter",
"=",
"collections",
".",
"Counter",
"(",
")",
"umi_counter",
"=",
"collections",
".",
"Counter",
"(",
")",
"for",
"read",
"in",
"read_fastq",
"(",
"fastq",
")",
":",
"match",
"=",
"parser_re",
".",
"search",
"(",
"read",
")",
".",
"groupdict",
"(",
")",
"cb",
"=",
"match",
"[",
"'CB'",
"]",
"cb_counter",
"[",
"cb",
"]",
"+=",
"1",
"if",
"umi_histogram",
":",
"umi",
"=",
"match",
"[",
"'MB'",
"]",
"umi_counter",
"[",
"(",
"cb",
",",
"umi",
")",
"]",
"+=",
"1",
"for",
"bc",
",",
"count",
"in",
"cb_counter",
".",
"most_common",
"(",
")",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"'{}\\t{}\\n'",
".",
"format",
"(",
"bc",
",",
"count",
")",
")",
"if",
"umi_histogram",
":",
"with",
"open",
"(",
"umi_histogram",
",",
"\"w\"",
")",
"as",
"umi_handle",
":",
"for",
"cbumi",
",",
"count",
"in",
"umi_counter",
".",
"most_common",
"(",
")",
":",
"umi_handle",
".",
"write",
"(",
"'{}\\t{}\\t{}\\n'",
".",
"format",
"(",
"cbumi",
"[",
"0",
"]",
",",
"cbumi",
"[",
"1",
"]",
",",
"count",
")",
")"
] | Counts the number of reads for each cellular barcode
Expects formatted fastq files. | [
"Counts",
"the",
"number",
"of",
"reads",
"for",
"each",
"cellular",
"barcode"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L959-L985 | train | 251,660 |
vals/umis | umis/umis.py | umi_histogram | def umi_histogram(fastq):
''' Counts the number of reads for each UMI
Expects formatted fastq files.
'''
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
parser_re = re.compile(re_string)
counter = collections.Counter()
for read in read_fastq(fastq):
match = parser_re.search(read).groupdict()
counter[match['MB']] += 1
for bc, count in counter.most_common():
sys.stdout.write('{}\t{}\n'.format(bc, count)) | python | def umi_histogram(fastq):
''' Counts the number of reads for each UMI
Expects formatted fastq files.
'''
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
parser_re = re.compile(re_string)
counter = collections.Counter()
for read in read_fastq(fastq):
match = parser_re.search(read).groupdict()
counter[match['MB']] += 1
for bc, count in counter.most_common():
sys.stdout.write('{}\t{}\n'.format(bc, count)) | [
"def",
"umi_histogram",
"(",
"fastq",
")",
":",
"annotations",
"=",
"detect_fastq_annotations",
"(",
"fastq",
")",
"re_string",
"=",
"construct_transformed_regex",
"(",
"annotations",
")",
"parser_re",
"=",
"re",
".",
"compile",
"(",
"re_string",
")",
"counter",
"=",
"collections",
".",
"Counter",
"(",
")",
"for",
"read",
"in",
"read_fastq",
"(",
"fastq",
")",
":",
"match",
"=",
"parser_re",
".",
"search",
"(",
"read",
")",
".",
"groupdict",
"(",
")",
"counter",
"[",
"match",
"[",
"'MB'",
"]",
"]",
"+=",
"1",
"for",
"bc",
",",
"count",
"in",
"counter",
".",
"most_common",
"(",
")",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"'{}\\t{}\\n'",
".",
"format",
"(",
"bc",
",",
"count",
")",
")"
] | Counts the number of reads for each UMI
Expects formatted fastq files. | [
"Counts",
"the",
"number",
"of",
"reads",
"for",
"each",
"UMI"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L989-L1004 | train | 251,661 |
vals/umis | umis/umis.py | get_cb_depth_set | def get_cb_depth_set(cb_histogram, cb_cutoff):
''' Returns a set of barcodes with a minimum number of reads
'''
cb_keep_set = set()
if not cb_histogram:
return cb_keep_set
with read_cbhistogram(cb_histogram) as fh:
cb_map = dict(p.strip().split() for p in fh)
cb_keep_set = set([k for k, v in cb_map.items() if int(v) > cb_cutoff])
logger.info('Keeping %d out of %d cellular barcodes.'
% (len(cb_keep_set), len(cb_map)))
return cb_keep_set | python | def get_cb_depth_set(cb_histogram, cb_cutoff):
''' Returns a set of barcodes with a minimum number of reads
'''
cb_keep_set = set()
if not cb_histogram:
return cb_keep_set
with read_cbhistogram(cb_histogram) as fh:
cb_map = dict(p.strip().split() for p in fh)
cb_keep_set = set([k for k, v in cb_map.items() if int(v) > cb_cutoff])
logger.info('Keeping %d out of %d cellular barcodes.'
% (len(cb_keep_set), len(cb_map)))
return cb_keep_set | [
"def",
"get_cb_depth_set",
"(",
"cb_histogram",
",",
"cb_cutoff",
")",
":",
"cb_keep_set",
"=",
"set",
"(",
")",
"if",
"not",
"cb_histogram",
":",
"return",
"cb_keep_set",
"with",
"read_cbhistogram",
"(",
"cb_histogram",
")",
"as",
"fh",
":",
"cb_map",
"=",
"dict",
"(",
"p",
".",
"strip",
"(",
")",
".",
"split",
"(",
")",
"for",
"p",
"in",
"fh",
")",
"cb_keep_set",
"=",
"set",
"(",
"[",
"k",
"for",
"k",
",",
"v",
"in",
"cb_map",
".",
"items",
"(",
")",
"if",
"int",
"(",
"v",
")",
">",
"cb_cutoff",
"]",
")",
"logger",
".",
"info",
"(",
"'Keeping %d out of %d cellular barcodes.'",
"%",
"(",
"len",
"(",
"cb_keep_set",
")",
",",
"len",
"(",
"cb_map",
")",
")",
")",
"return",
"cb_keep_set"
] | Returns a set of barcodes with a minimum number of reads | [
"Returns",
"a",
"set",
"of",
"barcodes",
"with",
"a",
"minimum",
"number",
"of",
"reads"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1006-L1018 | train | 251,662 |
vals/umis | umis/umis.py | guess_depth_cutoff | def guess_depth_cutoff(cb_histogram):
''' Guesses at an appropriate barcode cutoff
'''
with read_cbhistogram(cb_histogram) as fh:
cb_vals = [int(p.strip().split()[1]) for p in fh]
histo = np.histogram(np.log10(cb_vals), bins=50)
vals = histo[0]
edges = histo[1]
mids = np.array([(edges[i] + edges[i+1])/2 for i in range(edges.size - 1)])
wdensity = vals * (10**mids) / sum(vals * (10**mids))
baseline = np.median(wdensity)
wdensity = list(wdensity)
# find highest density in upper half of barcode distribution
peak = wdensity.index(max(wdensity[len(wdensity)/2:]))
cutoff = None
for index, dens in reversed(list(enumerate(wdensity[1:peak]))):
if dens < 2 * baseline:
cutoff = index
break
if not cutoff:
return None
else:
cutoff = 10**mids[cutoff]
logger.info('Setting barcode cutoff to %d' % cutoff)
return cutoff | python | def guess_depth_cutoff(cb_histogram):
''' Guesses at an appropriate barcode cutoff
'''
with read_cbhistogram(cb_histogram) as fh:
cb_vals = [int(p.strip().split()[1]) for p in fh]
histo = np.histogram(np.log10(cb_vals), bins=50)
vals = histo[0]
edges = histo[1]
mids = np.array([(edges[i] + edges[i+1])/2 for i in range(edges.size - 1)])
wdensity = vals * (10**mids) / sum(vals * (10**mids))
baseline = np.median(wdensity)
wdensity = list(wdensity)
# find highest density in upper half of barcode distribution
peak = wdensity.index(max(wdensity[len(wdensity)/2:]))
cutoff = None
for index, dens in reversed(list(enumerate(wdensity[1:peak]))):
if dens < 2 * baseline:
cutoff = index
break
if not cutoff:
return None
else:
cutoff = 10**mids[cutoff]
logger.info('Setting barcode cutoff to %d' % cutoff)
return cutoff | [
"def",
"guess_depth_cutoff",
"(",
"cb_histogram",
")",
":",
"with",
"read_cbhistogram",
"(",
"cb_histogram",
")",
"as",
"fh",
":",
"cb_vals",
"=",
"[",
"int",
"(",
"p",
".",
"strip",
"(",
")",
".",
"split",
"(",
")",
"[",
"1",
"]",
")",
"for",
"p",
"in",
"fh",
"]",
"histo",
"=",
"np",
".",
"histogram",
"(",
"np",
".",
"log10",
"(",
"cb_vals",
")",
",",
"bins",
"=",
"50",
")",
"vals",
"=",
"histo",
"[",
"0",
"]",
"edges",
"=",
"histo",
"[",
"1",
"]",
"mids",
"=",
"np",
".",
"array",
"(",
"[",
"(",
"edges",
"[",
"i",
"]",
"+",
"edges",
"[",
"i",
"+",
"1",
"]",
")",
"/",
"2",
"for",
"i",
"in",
"range",
"(",
"edges",
".",
"size",
"-",
"1",
")",
"]",
")",
"wdensity",
"=",
"vals",
"*",
"(",
"10",
"**",
"mids",
")",
"/",
"sum",
"(",
"vals",
"*",
"(",
"10",
"**",
"mids",
")",
")",
"baseline",
"=",
"np",
".",
"median",
"(",
"wdensity",
")",
"wdensity",
"=",
"list",
"(",
"wdensity",
")",
"# find highest density in upper half of barcode distribution",
"peak",
"=",
"wdensity",
".",
"index",
"(",
"max",
"(",
"wdensity",
"[",
"len",
"(",
"wdensity",
")",
"/",
"2",
":",
"]",
")",
")",
"cutoff",
"=",
"None",
"for",
"index",
",",
"dens",
"in",
"reversed",
"(",
"list",
"(",
"enumerate",
"(",
"wdensity",
"[",
"1",
":",
"peak",
"]",
")",
")",
")",
":",
"if",
"dens",
"<",
"2",
"*",
"baseline",
":",
"cutoff",
"=",
"index",
"break",
"if",
"not",
"cutoff",
":",
"return",
"None",
"else",
":",
"cutoff",
"=",
"10",
"**",
"mids",
"[",
"cutoff",
"]",
"logger",
".",
"info",
"(",
"'Setting barcode cutoff to %d'",
"%",
"cutoff",
")",
"return",
"cutoff"
] | Guesses at an appropriate barcode cutoff | [
"Guesses",
"at",
"an",
"appropriate",
"barcode",
"cutoff"
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1020-L1044 | train | 251,663 |
vals/umis | umis/umis.py | cb_filter | def cb_filter(fastq, bc1, bc2, bc3, cores, nedit):
''' Filters reads with non-matching barcodes
Expects formatted fastq files.
'''
with open_gzipsafe(bc1) as bc1_fh:
bc1 = set(cb.strip() for cb in bc1_fh)
if bc2:
with open_gzipsafe(bc2) as bc2_fh:
bc2 = set(cb.strip() for cb in bc2_fh)
if bc3:
with open_gzipsafe(bc3) as bc3_fh:
bc3 = set(cb.strip() for cb in bc3_fh)
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
if nedit == 0:
filter_cb = partial(exact_barcode_filter, bc1=bc1, bc2=bc2, bc3=bc3,
re_string=re_string)
else:
bc1hash = MutationHash(bc1, nedit)
bc2hash = None
bc3hash = None
if bc2:
bc2hash = MutationHash(bc2, nedit)
if bc3:
bc3hash = MutationHash(bc3, nedit)
filter_cb = partial(correcting_barcode_filter, bc1hash=bc1hash,
bc2hash=bc2hash, bc3hash=bc3hash, re_string=re_string)
p = multiprocessing.Pool(cores)
chunks = tz.partition_all(10000, read_fastq(fastq))
bigchunks = tz.partition_all(cores, chunks)
for bigchunk in bigchunks:
for chunk in p.map(filter_cb, list(bigchunk)):
for read in chunk:
sys.stdout.write(read) | python | def cb_filter(fastq, bc1, bc2, bc3, cores, nedit):
''' Filters reads with non-matching barcodes
Expects formatted fastq files.
'''
with open_gzipsafe(bc1) as bc1_fh:
bc1 = set(cb.strip() for cb in bc1_fh)
if bc2:
with open_gzipsafe(bc2) as bc2_fh:
bc2 = set(cb.strip() for cb in bc2_fh)
if bc3:
with open_gzipsafe(bc3) as bc3_fh:
bc3 = set(cb.strip() for cb in bc3_fh)
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
if nedit == 0:
filter_cb = partial(exact_barcode_filter, bc1=bc1, bc2=bc2, bc3=bc3,
re_string=re_string)
else:
bc1hash = MutationHash(bc1, nedit)
bc2hash = None
bc3hash = None
if bc2:
bc2hash = MutationHash(bc2, nedit)
if bc3:
bc3hash = MutationHash(bc3, nedit)
filter_cb = partial(correcting_barcode_filter, bc1hash=bc1hash,
bc2hash=bc2hash, bc3hash=bc3hash, re_string=re_string)
p = multiprocessing.Pool(cores)
chunks = tz.partition_all(10000, read_fastq(fastq))
bigchunks = tz.partition_all(cores, chunks)
for bigchunk in bigchunks:
for chunk in p.map(filter_cb, list(bigchunk)):
for read in chunk:
sys.stdout.write(read) | [
"def",
"cb_filter",
"(",
"fastq",
",",
"bc1",
",",
"bc2",
",",
"bc3",
",",
"cores",
",",
"nedit",
")",
":",
"with",
"open_gzipsafe",
"(",
"bc1",
")",
"as",
"bc1_fh",
":",
"bc1",
"=",
"set",
"(",
"cb",
".",
"strip",
"(",
")",
"for",
"cb",
"in",
"bc1_fh",
")",
"if",
"bc2",
":",
"with",
"open_gzipsafe",
"(",
"bc2",
")",
"as",
"bc2_fh",
":",
"bc2",
"=",
"set",
"(",
"cb",
".",
"strip",
"(",
")",
"for",
"cb",
"in",
"bc2_fh",
")",
"if",
"bc3",
":",
"with",
"open_gzipsafe",
"(",
"bc3",
")",
"as",
"bc3_fh",
":",
"bc3",
"=",
"set",
"(",
"cb",
".",
"strip",
"(",
")",
"for",
"cb",
"in",
"bc3_fh",
")",
"annotations",
"=",
"detect_fastq_annotations",
"(",
"fastq",
")",
"re_string",
"=",
"construct_transformed_regex",
"(",
"annotations",
")",
"if",
"nedit",
"==",
"0",
":",
"filter_cb",
"=",
"partial",
"(",
"exact_barcode_filter",
",",
"bc1",
"=",
"bc1",
",",
"bc2",
"=",
"bc2",
",",
"bc3",
"=",
"bc3",
",",
"re_string",
"=",
"re_string",
")",
"else",
":",
"bc1hash",
"=",
"MutationHash",
"(",
"bc1",
",",
"nedit",
")",
"bc2hash",
"=",
"None",
"bc3hash",
"=",
"None",
"if",
"bc2",
":",
"bc2hash",
"=",
"MutationHash",
"(",
"bc2",
",",
"nedit",
")",
"if",
"bc3",
":",
"bc3hash",
"=",
"MutationHash",
"(",
"bc3",
",",
"nedit",
")",
"filter_cb",
"=",
"partial",
"(",
"correcting_barcode_filter",
",",
"bc1hash",
"=",
"bc1hash",
",",
"bc2hash",
"=",
"bc2hash",
",",
"bc3hash",
"=",
"bc3hash",
",",
"re_string",
"=",
"re_string",
")",
"p",
"=",
"multiprocessing",
".",
"Pool",
"(",
"cores",
")",
"chunks",
"=",
"tz",
".",
"partition_all",
"(",
"10000",
",",
"read_fastq",
"(",
"fastq",
")",
")",
"bigchunks",
"=",
"tz",
".",
"partition_all",
"(",
"cores",
",",
"chunks",
")",
"for",
"bigchunk",
"in",
"bigchunks",
":",
"for",
"chunk",
"in",
"p",
".",
"map",
"(",
"filter_cb",
",",
"list",
"(",
"bigchunk",
")",
")",
":",
"for",
"read",
"in",
"chunk",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"read",
")"
] | Filters reads with non-matching barcodes
Expects formatted fastq files. | [
"Filters",
"reads",
"with",
"non",
"-",
"matching",
"barcodes",
"Expects",
"formatted",
"fastq",
"files",
"."
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1053-L1090 | train | 251,664 |
vals/umis | umis/umis.py | sb_filter | def sb_filter(fastq, bc, cores, nedit):
''' Filters reads with non-matching sample barcodes
Expects formatted fastq files.
'''
barcodes = set(sb.strip() for sb in bc)
if nedit == 0:
filter_sb = partial(exact_sample_filter2, barcodes=barcodes)
else:
barcodehash = MutationHash(barcodes, nedit)
filter_sb = partial(correcting_sample_filter2, barcodehash=barcodehash)
p = multiprocessing.Pool(cores)
chunks = tz.partition_all(10000, read_fastq(fastq))
bigchunks = tz.partition_all(cores, chunks)
for bigchunk in bigchunks:
for chunk in p.map(filter_sb, list(bigchunk)):
for read in chunk:
sys.stdout.write(read) | python | def sb_filter(fastq, bc, cores, nedit):
''' Filters reads with non-matching sample barcodes
Expects formatted fastq files.
'''
barcodes = set(sb.strip() for sb in bc)
if nedit == 0:
filter_sb = partial(exact_sample_filter2, barcodes=barcodes)
else:
barcodehash = MutationHash(barcodes, nedit)
filter_sb = partial(correcting_sample_filter2, barcodehash=barcodehash)
p = multiprocessing.Pool(cores)
chunks = tz.partition_all(10000, read_fastq(fastq))
bigchunks = tz.partition_all(cores, chunks)
for bigchunk in bigchunks:
for chunk in p.map(filter_sb, list(bigchunk)):
for read in chunk:
sys.stdout.write(read) | [
"def",
"sb_filter",
"(",
"fastq",
",",
"bc",
",",
"cores",
",",
"nedit",
")",
":",
"barcodes",
"=",
"set",
"(",
"sb",
".",
"strip",
"(",
")",
"for",
"sb",
"in",
"bc",
")",
"if",
"nedit",
"==",
"0",
":",
"filter_sb",
"=",
"partial",
"(",
"exact_sample_filter2",
",",
"barcodes",
"=",
"barcodes",
")",
"else",
":",
"barcodehash",
"=",
"MutationHash",
"(",
"barcodes",
",",
"nedit",
")",
"filter_sb",
"=",
"partial",
"(",
"correcting_sample_filter2",
",",
"barcodehash",
"=",
"barcodehash",
")",
"p",
"=",
"multiprocessing",
".",
"Pool",
"(",
"cores",
")",
"chunks",
"=",
"tz",
".",
"partition_all",
"(",
"10000",
",",
"read_fastq",
"(",
"fastq",
")",
")",
"bigchunks",
"=",
"tz",
".",
"partition_all",
"(",
"cores",
",",
"chunks",
")",
"for",
"bigchunk",
"in",
"bigchunks",
":",
"for",
"chunk",
"in",
"p",
".",
"map",
"(",
"filter_sb",
",",
"list",
"(",
"bigchunk",
")",
")",
":",
"for",
"read",
"in",
"chunk",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"read",
")"
] | Filters reads with non-matching sample barcodes
Expects formatted fastq files. | [
"Filters",
"reads",
"with",
"non",
"-",
"matching",
"sample",
"barcodes",
"Expects",
"formatted",
"fastq",
"files",
"."
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1097-L1114 | train | 251,665 |
vals/umis | umis/umis.py | mb_filter | def mb_filter(fastq, cores):
''' Filters umis with non-ACGT bases
Expects formatted fastq files.
'''
filter_mb = partial(umi_filter)
p = multiprocessing.Pool(cores)
chunks = tz.partition_all(10000, read_fastq(fastq))
bigchunks = tz.partition_all(cores, chunks)
for bigchunk in bigchunks:
for chunk in p.map(filter_mb, list(bigchunk)):
for read in chunk:
sys.stdout.write(read) | python | def mb_filter(fastq, cores):
''' Filters umis with non-ACGT bases
Expects formatted fastq files.
'''
filter_mb = partial(umi_filter)
p = multiprocessing.Pool(cores)
chunks = tz.partition_all(10000, read_fastq(fastq))
bigchunks = tz.partition_all(cores, chunks)
for bigchunk in bigchunks:
for chunk in p.map(filter_mb, list(bigchunk)):
for read in chunk:
sys.stdout.write(read) | [
"def",
"mb_filter",
"(",
"fastq",
",",
"cores",
")",
":",
"filter_mb",
"=",
"partial",
"(",
"umi_filter",
")",
"p",
"=",
"multiprocessing",
".",
"Pool",
"(",
"cores",
")",
"chunks",
"=",
"tz",
".",
"partition_all",
"(",
"10000",
",",
"read_fastq",
"(",
"fastq",
")",
")",
"bigchunks",
"=",
"tz",
".",
"partition_all",
"(",
"cores",
",",
"chunks",
")",
"for",
"bigchunk",
"in",
"bigchunks",
":",
"for",
"chunk",
"in",
"p",
".",
"map",
"(",
"filter_mb",
",",
"list",
"(",
"bigchunk",
")",
")",
":",
"for",
"read",
"in",
"chunk",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"read",
")"
] | Filters umis with non-ACGT bases
Expects formatted fastq files. | [
"Filters",
"umis",
"with",
"non",
"-",
"ACGT",
"bases",
"Expects",
"formatted",
"fastq",
"files",
"."
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1119-L1131 | train | 251,666 |
vals/umis | umis/umis.py | kallisto | def kallisto(fastq, out_dir, cb_histogram, cb_cutoff):
''' Convert fastqtransformed file to output format compatible with
kallisto.
'''
parser_re = re.compile('(.*):CELL_(?<CB>.*):UMI_(?P<UMI>.*)\\n(.*)\\n\\+\\n(.*)\\n')
if fastq.endswith('gz'):
fastq_fh = gzip.GzipFile(fileobj=open(fastq))
elif fastq == "-":
fastq_fh = sys.stdin
else:
fastq_fh = open(fastq)
cb_depth_set = get_cb_depth_set(cb_histogram, cb_cutoff)
cb_set = set()
cb_batch = collections.defaultdict(list)
parsed = 0
for read in stream_fastq(fastq_fh):
match = parser_re.search(read).groupdict()
umi = match['UMI']
cb = match['CB']
if cb_depth_set and cb not in cb_depth_set:
continue
parsed += 1
cb_set.add(cb)
cb_batch[cb].append((read, umi))
# write in batches to avoid opening up file handles repeatedly
if not parsed % 10000000:
for cb, chunk in cb_batch.items():
write_kallisto_chunk(out_dir, cb, chunk)
cb_batch = collections.defaultdict(list)
for cb, chunk in cb_batch.items():
write_kallisto_chunk(out_dir, cb, chunk)
with open(os.path.join(out_dir, "barcodes.batch"), "w") as out_handle:
out_handle.write("#id umi-file file-1\n")
batchformat = "{cb} {cb}.umi {cb}.fq\n"
for cb in cb_set:
out_handle.write(batchformat.format(**locals())) | python | def kallisto(fastq, out_dir, cb_histogram, cb_cutoff):
''' Convert fastqtransformed file to output format compatible with
kallisto.
'''
parser_re = re.compile('(.*):CELL_(?<CB>.*):UMI_(?P<UMI>.*)\\n(.*)\\n\\+\\n(.*)\\n')
if fastq.endswith('gz'):
fastq_fh = gzip.GzipFile(fileobj=open(fastq))
elif fastq == "-":
fastq_fh = sys.stdin
else:
fastq_fh = open(fastq)
cb_depth_set = get_cb_depth_set(cb_histogram, cb_cutoff)
cb_set = set()
cb_batch = collections.defaultdict(list)
parsed = 0
for read in stream_fastq(fastq_fh):
match = parser_re.search(read).groupdict()
umi = match['UMI']
cb = match['CB']
if cb_depth_set and cb not in cb_depth_set:
continue
parsed += 1
cb_set.add(cb)
cb_batch[cb].append((read, umi))
# write in batches to avoid opening up file handles repeatedly
if not parsed % 10000000:
for cb, chunk in cb_batch.items():
write_kallisto_chunk(out_dir, cb, chunk)
cb_batch = collections.defaultdict(list)
for cb, chunk in cb_batch.items():
write_kallisto_chunk(out_dir, cb, chunk)
with open(os.path.join(out_dir, "barcodes.batch"), "w") as out_handle:
out_handle.write("#id umi-file file-1\n")
batchformat = "{cb} {cb}.umi {cb}.fq\n"
for cb in cb_set:
out_handle.write(batchformat.format(**locals())) | [
"def",
"kallisto",
"(",
"fastq",
",",
"out_dir",
",",
"cb_histogram",
",",
"cb_cutoff",
")",
":",
"parser_re",
"=",
"re",
".",
"compile",
"(",
"'(.*):CELL_(?<CB>.*):UMI_(?P<UMI>.*)\\\\n(.*)\\\\n\\\\+\\\\n(.*)\\\\n'",
")",
"if",
"fastq",
".",
"endswith",
"(",
"'gz'",
")",
":",
"fastq_fh",
"=",
"gzip",
".",
"GzipFile",
"(",
"fileobj",
"=",
"open",
"(",
"fastq",
")",
")",
"elif",
"fastq",
"==",
"\"-\"",
":",
"fastq_fh",
"=",
"sys",
".",
"stdin",
"else",
":",
"fastq_fh",
"=",
"open",
"(",
"fastq",
")",
"cb_depth_set",
"=",
"get_cb_depth_set",
"(",
"cb_histogram",
",",
"cb_cutoff",
")",
"cb_set",
"=",
"set",
"(",
")",
"cb_batch",
"=",
"collections",
".",
"defaultdict",
"(",
"list",
")",
"parsed",
"=",
"0",
"for",
"read",
"in",
"stream_fastq",
"(",
"fastq_fh",
")",
":",
"match",
"=",
"parser_re",
".",
"search",
"(",
"read",
")",
".",
"groupdict",
"(",
")",
"umi",
"=",
"match",
"[",
"'UMI'",
"]",
"cb",
"=",
"match",
"[",
"'CB'",
"]",
"if",
"cb_depth_set",
"and",
"cb",
"not",
"in",
"cb_depth_set",
":",
"continue",
"parsed",
"+=",
"1",
"cb_set",
".",
"add",
"(",
"cb",
")",
"cb_batch",
"[",
"cb",
"]",
".",
"append",
"(",
"(",
"read",
",",
"umi",
")",
")",
"# write in batches to avoid opening up file handles repeatedly",
"if",
"not",
"parsed",
"%",
"10000000",
":",
"for",
"cb",
",",
"chunk",
"in",
"cb_batch",
".",
"items",
"(",
")",
":",
"write_kallisto_chunk",
"(",
"out_dir",
",",
"cb",
",",
"chunk",
")",
"cb_batch",
"=",
"collections",
".",
"defaultdict",
"(",
"list",
")",
"for",
"cb",
",",
"chunk",
"in",
"cb_batch",
".",
"items",
"(",
")",
":",
"write_kallisto_chunk",
"(",
"out_dir",
",",
"cb",
",",
"chunk",
")",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"out_dir",
",",
"\"barcodes.batch\"",
")",
",",
"\"w\"",
")",
"as",
"out_handle",
":",
"out_handle",
".",
"write",
"(",
"\"#id umi-file file-1\\n\"",
")",
"batchformat",
"=",
"\"{cb} {cb}.umi {cb}.fq\\n\"",
"for",
"cb",
"in",
"cb_set",
":",
"out_handle",
".",
"write",
"(",
"batchformat",
".",
"format",
"(",
"*",
"*",
"locals",
"(",
")",
")",
")"
] | Convert fastqtransformed file to output format compatible with
kallisto. | [
"Convert",
"fastqtransformed",
"file",
"to",
"output",
"format",
"compatible",
"with",
"kallisto",
"."
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1164-L1203 | train | 251,667 |
vals/umis | umis/umis.py | demultiplex_samples | def demultiplex_samples(fastq, out_dir, nedit, barcodes):
''' Demultiplex a fastqtransformed FASTQ file into a FASTQ file for
each sample.
'''
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
parser_re = re.compile(re_string)
if barcodes:
barcodes = set(barcode.strip() for barcode in barcodes)
else:
barcodes = set()
if nedit == 0:
filter_bc = partial(exact_sample_filter, barcodes=barcodes)
else:
barcodehash = MutationHash(barcodes, nedit)
filter_bc = partial(correcting_sample_filter, barcodehash=barcodehash)
sample_set = set()
batch = collections.defaultdict(list)
parsed = 0
safe_makedir(out_dir)
for read in read_fastq(fastq):
parsed += 1
read = filter_bc(read)
if not read:
continue
match = parser_re.search(read).groupdict()
sample = match['SB']
sample_set.add(sample)
batch[sample].append(read)
# write in batches to avoid opening up file handles repeatedly
if not parsed % 10000000:
for sample, reads in batch.items():
out_file = os.path.join(out_dir, sample + ".fq")
with open(out_file, "a") as out_handle:
for read in reads:
fixed = filter_bc(read)
if fixed:
out_handle.write(fixed)
batch = collections.defaultdict(list)
for sample, reads in batch.items():
out_file = os.path.join(out_dir, sample + ".fq")
with open(out_file, "a") as out_handle:
for read in reads:
fixed = filter_bc(read)
if fixed:
out_handle.write(read) | python | def demultiplex_samples(fastq, out_dir, nedit, barcodes):
''' Demultiplex a fastqtransformed FASTQ file into a FASTQ file for
each sample.
'''
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
parser_re = re.compile(re_string)
if barcodes:
barcodes = set(barcode.strip() for barcode in barcodes)
else:
barcodes = set()
if nedit == 0:
filter_bc = partial(exact_sample_filter, barcodes=barcodes)
else:
barcodehash = MutationHash(barcodes, nedit)
filter_bc = partial(correcting_sample_filter, barcodehash=barcodehash)
sample_set = set()
batch = collections.defaultdict(list)
parsed = 0
safe_makedir(out_dir)
for read in read_fastq(fastq):
parsed += 1
read = filter_bc(read)
if not read:
continue
match = parser_re.search(read).groupdict()
sample = match['SB']
sample_set.add(sample)
batch[sample].append(read)
# write in batches to avoid opening up file handles repeatedly
if not parsed % 10000000:
for sample, reads in batch.items():
out_file = os.path.join(out_dir, sample + ".fq")
with open(out_file, "a") as out_handle:
for read in reads:
fixed = filter_bc(read)
if fixed:
out_handle.write(fixed)
batch = collections.defaultdict(list)
for sample, reads in batch.items():
out_file = os.path.join(out_dir, sample + ".fq")
with open(out_file, "a") as out_handle:
for read in reads:
fixed = filter_bc(read)
if fixed:
out_handle.write(read) | [
"def",
"demultiplex_samples",
"(",
"fastq",
",",
"out_dir",
",",
"nedit",
",",
"barcodes",
")",
":",
"annotations",
"=",
"detect_fastq_annotations",
"(",
"fastq",
")",
"re_string",
"=",
"construct_transformed_regex",
"(",
"annotations",
")",
"parser_re",
"=",
"re",
".",
"compile",
"(",
"re_string",
")",
"if",
"barcodes",
":",
"barcodes",
"=",
"set",
"(",
"barcode",
".",
"strip",
"(",
")",
"for",
"barcode",
"in",
"barcodes",
")",
"else",
":",
"barcodes",
"=",
"set",
"(",
")",
"if",
"nedit",
"==",
"0",
":",
"filter_bc",
"=",
"partial",
"(",
"exact_sample_filter",
",",
"barcodes",
"=",
"barcodes",
")",
"else",
":",
"barcodehash",
"=",
"MutationHash",
"(",
"barcodes",
",",
"nedit",
")",
"filter_bc",
"=",
"partial",
"(",
"correcting_sample_filter",
",",
"barcodehash",
"=",
"barcodehash",
")",
"sample_set",
"=",
"set",
"(",
")",
"batch",
"=",
"collections",
".",
"defaultdict",
"(",
"list",
")",
"parsed",
"=",
"0",
"safe_makedir",
"(",
"out_dir",
")",
"for",
"read",
"in",
"read_fastq",
"(",
"fastq",
")",
":",
"parsed",
"+=",
"1",
"read",
"=",
"filter_bc",
"(",
"read",
")",
"if",
"not",
"read",
":",
"continue",
"match",
"=",
"parser_re",
".",
"search",
"(",
"read",
")",
".",
"groupdict",
"(",
")",
"sample",
"=",
"match",
"[",
"'SB'",
"]",
"sample_set",
".",
"add",
"(",
"sample",
")",
"batch",
"[",
"sample",
"]",
".",
"append",
"(",
"read",
")",
"# write in batches to avoid opening up file handles repeatedly",
"if",
"not",
"parsed",
"%",
"10000000",
":",
"for",
"sample",
",",
"reads",
"in",
"batch",
".",
"items",
"(",
")",
":",
"out_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"out_dir",
",",
"sample",
"+",
"\".fq\"",
")",
"with",
"open",
"(",
"out_file",
",",
"\"a\"",
")",
"as",
"out_handle",
":",
"for",
"read",
"in",
"reads",
":",
"fixed",
"=",
"filter_bc",
"(",
"read",
")",
"if",
"fixed",
":",
"out_handle",
".",
"write",
"(",
"fixed",
")",
"batch",
"=",
"collections",
".",
"defaultdict",
"(",
"list",
")",
"for",
"sample",
",",
"reads",
"in",
"batch",
".",
"items",
"(",
")",
":",
"out_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"out_dir",
",",
"sample",
"+",
"\".fq\"",
")",
"with",
"open",
"(",
"out_file",
",",
"\"a\"",
")",
"as",
"out_handle",
":",
"for",
"read",
"in",
"reads",
":",
"fixed",
"=",
"filter_bc",
"(",
"read",
")",
"if",
"fixed",
":",
"out_handle",
".",
"write",
"(",
"read",
")"
] | Demultiplex a fastqtransformed FASTQ file into a FASTQ file for
each sample. | [
"Demultiplex",
"a",
"fastqtransformed",
"FASTQ",
"file",
"into",
"a",
"FASTQ",
"file",
"for",
"each",
"sample",
"."
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1256-L1305 | train | 251,668 |
vals/umis | umis/umis.py | demultiplex_cells | def demultiplex_cells(fastq, out_dir, readnumber, prefix, cb_histogram,
cb_cutoff):
''' Demultiplex a fastqtransformed FASTQ file into a FASTQ file for
each cell.
'''
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
parser_re = re.compile(re_string)
readstring = "" if not readnumber else "_R{}".format(readnumber)
filestring = "{prefix}{sample}{readstring}.fq"
cb_set = set()
if cb_histogram:
cb_set = get_cb_depth_set(cb_histogram, cb_cutoff)
sample_set = set()
batch = collections.defaultdict(list)
parsed = 0
safe_makedir(out_dir)
for read in read_fastq(fastq):
parsed += 1
match = parser_re.search(read).groupdict()
sample = match['CB']
if cb_set and sample not in cb_set:
continue
sample_set.add(sample)
batch[sample].append(read)
# write in batches to avoid opening up file handles repeatedly
if not parsed % 10000000:
for sample, reads in batch.items():
out_file = os.path.join(out_dir, filestring.format(**locals()))
with open(out_file, "a") as out_handle:
for read in reads:
out_handle.write(read)
batch = collections.defaultdict(list)
for sample, reads in batch.items():
out_file = os.path.join(out_dir, filestring.format(**locals()))
with open(out_file, "a") as out_handle:
for read in reads:
out_handle.write(read) | python | def demultiplex_cells(fastq, out_dir, readnumber, prefix, cb_histogram,
cb_cutoff):
''' Demultiplex a fastqtransformed FASTQ file into a FASTQ file for
each cell.
'''
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
parser_re = re.compile(re_string)
readstring = "" if not readnumber else "_R{}".format(readnumber)
filestring = "{prefix}{sample}{readstring}.fq"
cb_set = set()
if cb_histogram:
cb_set = get_cb_depth_set(cb_histogram, cb_cutoff)
sample_set = set()
batch = collections.defaultdict(list)
parsed = 0
safe_makedir(out_dir)
for read in read_fastq(fastq):
parsed += 1
match = parser_re.search(read).groupdict()
sample = match['CB']
if cb_set and sample not in cb_set:
continue
sample_set.add(sample)
batch[sample].append(read)
# write in batches to avoid opening up file handles repeatedly
if not parsed % 10000000:
for sample, reads in batch.items():
out_file = os.path.join(out_dir, filestring.format(**locals()))
with open(out_file, "a") as out_handle:
for read in reads:
out_handle.write(read)
batch = collections.defaultdict(list)
for sample, reads in batch.items():
out_file = os.path.join(out_dir, filestring.format(**locals()))
with open(out_file, "a") as out_handle:
for read in reads:
out_handle.write(read) | [
"def",
"demultiplex_cells",
"(",
"fastq",
",",
"out_dir",
",",
"readnumber",
",",
"prefix",
",",
"cb_histogram",
",",
"cb_cutoff",
")",
":",
"annotations",
"=",
"detect_fastq_annotations",
"(",
"fastq",
")",
"re_string",
"=",
"construct_transformed_regex",
"(",
"annotations",
")",
"parser_re",
"=",
"re",
".",
"compile",
"(",
"re_string",
")",
"readstring",
"=",
"\"\"",
"if",
"not",
"readnumber",
"else",
"\"_R{}\"",
".",
"format",
"(",
"readnumber",
")",
"filestring",
"=",
"\"{prefix}{sample}{readstring}.fq\"",
"cb_set",
"=",
"set",
"(",
")",
"if",
"cb_histogram",
":",
"cb_set",
"=",
"get_cb_depth_set",
"(",
"cb_histogram",
",",
"cb_cutoff",
")",
"sample_set",
"=",
"set",
"(",
")",
"batch",
"=",
"collections",
".",
"defaultdict",
"(",
"list",
")",
"parsed",
"=",
"0",
"safe_makedir",
"(",
"out_dir",
")",
"for",
"read",
"in",
"read_fastq",
"(",
"fastq",
")",
":",
"parsed",
"+=",
"1",
"match",
"=",
"parser_re",
".",
"search",
"(",
"read",
")",
".",
"groupdict",
"(",
")",
"sample",
"=",
"match",
"[",
"'CB'",
"]",
"if",
"cb_set",
"and",
"sample",
"not",
"in",
"cb_set",
":",
"continue",
"sample_set",
".",
"add",
"(",
"sample",
")",
"batch",
"[",
"sample",
"]",
".",
"append",
"(",
"read",
")",
"# write in batches to avoid opening up file handles repeatedly",
"if",
"not",
"parsed",
"%",
"10000000",
":",
"for",
"sample",
",",
"reads",
"in",
"batch",
".",
"items",
"(",
")",
":",
"out_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"out_dir",
",",
"filestring",
".",
"format",
"(",
"*",
"*",
"locals",
"(",
")",
")",
")",
"with",
"open",
"(",
"out_file",
",",
"\"a\"",
")",
"as",
"out_handle",
":",
"for",
"read",
"in",
"reads",
":",
"out_handle",
".",
"write",
"(",
"read",
")",
"batch",
"=",
"collections",
".",
"defaultdict",
"(",
"list",
")",
"for",
"sample",
",",
"reads",
"in",
"batch",
".",
"items",
"(",
")",
":",
"out_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"out_dir",
",",
"filestring",
".",
"format",
"(",
"*",
"*",
"locals",
"(",
")",
")",
")",
"with",
"open",
"(",
"out_file",
",",
"\"a\"",
")",
"as",
"out_handle",
":",
"for",
"read",
"in",
"reads",
":",
"out_handle",
".",
"write",
"(",
"read",
")"
] | Demultiplex a fastqtransformed FASTQ file into a FASTQ file for
each cell. | [
"Demultiplex",
"a",
"fastqtransformed",
"FASTQ",
"file",
"into",
"a",
"FASTQ",
"file",
"for",
"each",
"cell",
"."
] | e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1317-L1355 | train | 251,669 |
Deepwalker/aldjemy | aldjemy/postgres.py | array_type | def array_type(data_types, field):
"""
Allows conversion of Django ArrayField to SQLAlchemy Array.
Takes care of mapping the type of the array element.
"""
from sqlalchemy.dialects import postgresql
internal_type = field.base_field.get_internal_type()
# currently no support for multi-dimensional arrays
if internal_type in data_types and internal_type != 'ArrayField':
sub_type = data_types[internal_type](field)
if not isinstance(sub_type, (list, tuple)):
sub_type = [sub_type]
else:
raise RuntimeError('Unsupported array element type')
return postgresql.ARRAY(sub_type) | python | def array_type(data_types, field):
"""
Allows conversion of Django ArrayField to SQLAlchemy Array.
Takes care of mapping the type of the array element.
"""
from sqlalchemy.dialects import postgresql
internal_type = field.base_field.get_internal_type()
# currently no support for multi-dimensional arrays
if internal_type in data_types and internal_type != 'ArrayField':
sub_type = data_types[internal_type](field)
if not isinstance(sub_type, (list, tuple)):
sub_type = [sub_type]
else:
raise RuntimeError('Unsupported array element type')
return postgresql.ARRAY(sub_type) | [
"def",
"array_type",
"(",
"data_types",
",",
"field",
")",
":",
"from",
"sqlalchemy",
".",
"dialects",
"import",
"postgresql",
"internal_type",
"=",
"field",
".",
"base_field",
".",
"get_internal_type",
"(",
")",
"# currently no support for multi-dimensional arrays",
"if",
"internal_type",
"in",
"data_types",
"and",
"internal_type",
"!=",
"'ArrayField'",
":",
"sub_type",
"=",
"data_types",
"[",
"internal_type",
"]",
"(",
"field",
")",
"if",
"not",
"isinstance",
"(",
"sub_type",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"sub_type",
"=",
"[",
"sub_type",
"]",
"else",
":",
"raise",
"RuntimeError",
"(",
"'Unsupported array element type'",
")",
"return",
"postgresql",
".",
"ARRAY",
"(",
"sub_type",
")"
] | Allows conversion of Django ArrayField to SQLAlchemy Array.
Takes care of mapping the type of the array element. | [
"Allows",
"conversion",
"of",
"Django",
"ArrayField",
"to",
"SQLAlchemy",
"Array",
".",
"Takes",
"care",
"of",
"mapping",
"the",
"type",
"of",
"the",
"array",
"element",
"."
] | d58359a3710e7f21e47a70765b9d75c61143ceb1 | https://github.com/Deepwalker/aldjemy/blob/d58359a3710e7f21e47a70765b9d75c61143ceb1/aldjemy/postgres.py#L4-L21 | train | 251,670 |
Azure/blobxfer | blobxfer/util.py | set_verbose_logger_handlers | def set_verbose_logger_handlers(): # noqa
# type: (None) -> None
"""Set logger handler formatters to more detail"""
global _REGISTERED_LOGGER_HANDLERS
formatter = logging.Formatter(
'%(asctime)s %(levelname)s %(name)s:%(funcName)s:%(lineno)d '
'%(message)s')
formatter.default_msec_format = '%s.%03d'
for handler in _REGISTERED_LOGGER_HANDLERS:
handler.setFormatter(formatter) | python | def set_verbose_logger_handlers(): # noqa
# type: (None) -> None
"""Set logger handler formatters to more detail"""
global _REGISTERED_LOGGER_HANDLERS
formatter = logging.Formatter(
'%(asctime)s %(levelname)s %(name)s:%(funcName)s:%(lineno)d '
'%(message)s')
formatter.default_msec_format = '%s.%03d'
for handler in _REGISTERED_LOGGER_HANDLERS:
handler.setFormatter(formatter) | [
"def",
"set_verbose_logger_handlers",
"(",
")",
":",
"# noqa",
"# type: (None) -> None",
"global",
"_REGISTERED_LOGGER_HANDLERS",
"formatter",
"=",
"logging",
".",
"Formatter",
"(",
"'%(asctime)s %(levelname)s %(name)s:%(funcName)s:%(lineno)d '",
"'%(message)s'",
")",
"formatter",
".",
"default_msec_format",
"=",
"'%s.%03d'",
"for",
"handler",
"in",
"_REGISTERED_LOGGER_HANDLERS",
":",
"handler",
".",
"setFormatter",
"(",
"formatter",
")"
] | Set logger handler formatters to more detail | [
"Set",
"logger",
"handler",
"formatters",
"to",
"more",
"detail"
] | 3eccbe7530cc6a20ab2d30f9e034b6f021817f34 | https://github.com/Azure/blobxfer/blob/3eccbe7530cc6a20ab2d30f9e034b6f021817f34/blobxfer/util.py#L114-L123 | train | 251,671 |
Azure/blobxfer | cli/cli.py | download | def download(ctx):
"""Download blobs or files from Azure Storage"""
settings.add_cli_options(ctx.cli_options, settings.TransferAction.Download)
ctx.initialize(settings.TransferAction.Download)
specs = settings.create_download_specifications(
ctx.cli_options, ctx.config)
del ctx.cli_options
for spec in specs:
blobxfer.api.Downloader(
ctx.general_options, ctx.credentials, spec
).start() | python | def download(ctx):
"""Download blobs or files from Azure Storage"""
settings.add_cli_options(ctx.cli_options, settings.TransferAction.Download)
ctx.initialize(settings.TransferAction.Download)
specs = settings.create_download_specifications(
ctx.cli_options, ctx.config)
del ctx.cli_options
for spec in specs:
blobxfer.api.Downloader(
ctx.general_options, ctx.credentials, spec
).start() | [
"def",
"download",
"(",
"ctx",
")",
":",
"settings",
".",
"add_cli_options",
"(",
"ctx",
".",
"cli_options",
",",
"settings",
".",
"TransferAction",
".",
"Download",
")",
"ctx",
".",
"initialize",
"(",
"settings",
".",
"TransferAction",
".",
"Download",
")",
"specs",
"=",
"settings",
".",
"create_download_specifications",
"(",
"ctx",
".",
"cli_options",
",",
"ctx",
".",
"config",
")",
"del",
"ctx",
".",
"cli_options",
"for",
"spec",
"in",
"specs",
":",
"blobxfer",
".",
"api",
".",
"Downloader",
"(",
"ctx",
".",
"general_options",
",",
"ctx",
".",
"credentials",
",",
"spec",
")",
".",
"start",
"(",
")"
] | Download blobs or files from Azure Storage | [
"Download",
"blobs",
"or",
"files",
"from",
"Azure",
"Storage"
] | 3eccbe7530cc6a20ab2d30f9e034b6f021817f34 | https://github.com/Azure/blobxfer/blob/3eccbe7530cc6a20ab2d30f9e034b6f021817f34/cli/cli.py#L1071-L1081 | train | 251,672 |
Azure/blobxfer | cli/cli.py | synccopy | def synccopy(ctx):
"""Synchronously copy blobs or files between Azure Storage accounts"""
settings.add_cli_options(ctx.cli_options, settings.TransferAction.Synccopy)
ctx.initialize(settings.TransferAction.Synccopy)
specs = settings.create_synccopy_specifications(
ctx.cli_options, ctx.config)
del ctx.cli_options
for spec in specs:
blobxfer.api.SyncCopy(
ctx.general_options, ctx.credentials, spec
).start() | python | def synccopy(ctx):
"""Synchronously copy blobs or files between Azure Storage accounts"""
settings.add_cli_options(ctx.cli_options, settings.TransferAction.Synccopy)
ctx.initialize(settings.TransferAction.Synccopy)
specs = settings.create_synccopy_specifications(
ctx.cli_options, ctx.config)
del ctx.cli_options
for spec in specs:
blobxfer.api.SyncCopy(
ctx.general_options, ctx.credentials, spec
).start() | [
"def",
"synccopy",
"(",
"ctx",
")",
":",
"settings",
".",
"add_cli_options",
"(",
"ctx",
".",
"cli_options",
",",
"settings",
".",
"TransferAction",
".",
"Synccopy",
")",
"ctx",
".",
"initialize",
"(",
"settings",
".",
"TransferAction",
".",
"Synccopy",
")",
"specs",
"=",
"settings",
".",
"create_synccopy_specifications",
"(",
"ctx",
".",
"cli_options",
",",
"ctx",
".",
"config",
")",
"del",
"ctx",
".",
"cli_options",
"for",
"spec",
"in",
"specs",
":",
"blobxfer",
".",
"api",
".",
"SyncCopy",
"(",
"ctx",
".",
"general_options",
",",
"ctx",
".",
"credentials",
",",
"spec",
")",
".",
"start",
"(",
")"
] | Synchronously copy blobs or files between Azure Storage accounts | [
"Synchronously",
"copy",
"blobs",
"or",
"files",
"between",
"Azure",
"Storage",
"accounts"
] | 3eccbe7530cc6a20ab2d30f9e034b6f021817f34 | https://github.com/Azure/blobxfer/blob/3eccbe7530cc6a20ab2d30f9e034b6f021817f34/cli/cli.py#L1088-L1098 | train | 251,673 |
Azure/blobxfer | cli/cli.py | upload | def upload(ctx):
"""Upload files to Azure Storage"""
settings.add_cli_options(ctx.cli_options, settings.TransferAction.Upload)
ctx.initialize(settings.TransferAction.Upload)
specs = settings.create_upload_specifications(
ctx.cli_options, ctx.config)
del ctx.cli_options
for spec in specs:
blobxfer.api.Uploader(
ctx.general_options, ctx.credentials, spec
).start() | python | def upload(ctx):
"""Upload files to Azure Storage"""
settings.add_cli_options(ctx.cli_options, settings.TransferAction.Upload)
ctx.initialize(settings.TransferAction.Upload)
specs = settings.create_upload_specifications(
ctx.cli_options, ctx.config)
del ctx.cli_options
for spec in specs:
blobxfer.api.Uploader(
ctx.general_options, ctx.credentials, spec
).start() | [
"def",
"upload",
"(",
"ctx",
")",
":",
"settings",
".",
"add_cli_options",
"(",
"ctx",
".",
"cli_options",
",",
"settings",
".",
"TransferAction",
".",
"Upload",
")",
"ctx",
".",
"initialize",
"(",
"settings",
".",
"TransferAction",
".",
"Upload",
")",
"specs",
"=",
"settings",
".",
"create_upload_specifications",
"(",
"ctx",
".",
"cli_options",
",",
"ctx",
".",
"config",
")",
"del",
"ctx",
".",
"cli_options",
"for",
"spec",
"in",
"specs",
":",
"blobxfer",
".",
"api",
".",
"Uploader",
"(",
"ctx",
".",
"general_options",
",",
"ctx",
".",
"credentials",
",",
"spec",
")",
".",
"start",
"(",
")"
] | Upload files to Azure Storage | [
"Upload",
"files",
"to",
"Azure",
"Storage"
] | 3eccbe7530cc6a20ab2d30f9e034b6f021817f34 | https://github.com/Azure/blobxfer/blob/3eccbe7530cc6a20ab2d30f9e034b6f021817f34/cli/cli.py#L1106-L1116 | train | 251,674 |
knaperek/djangosaml2 | djangosaml2/utils.py | get_idp_sso_supported_bindings | def get_idp_sso_supported_bindings(idp_entity_id=None, config=None):
"""Returns the list of bindings supported by an IDP
This is not clear in the pysaml2 code, so wrapping it in a util"""
if config is None:
# avoid circular import
from djangosaml2.conf import get_config
config = get_config()
# load metadata store from config
meta = getattr(config, 'metadata', {})
# if idp is None, assume only one exists so just use that
if idp_entity_id is None:
# .keys() returns dict_keys in python3.5+
try:
idp_entity_id = list(available_idps(config).keys())[0]
except IndexError:
raise ImproperlyConfigured("No IdP configured!")
try:
return meta.service(idp_entity_id, 'idpsso_descriptor', 'single_sign_on_service').keys()
except UnknownSystemEntity:
return [] | python | def get_idp_sso_supported_bindings(idp_entity_id=None, config=None):
"""Returns the list of bindings supported by an IDP
This is not clear in the pysaml2 code, so wrapping it in a util"""
if config is None:
# avoid circular import
from djangosaml2.conf import get_config
config = get_config()
# load metadata store from config
meta = getattr(config, 'metadata', {})
# if idp is None, assume only one exists so just use that
if idp_entity_id is None:
# .keys() returns dict_keys in python3.5+
try:
idp_entity_id = list(available_idps(config).keys())[0]
except IndexError:
raise ImproperlyConfigured("No IdP configured!")
try:
return meta.service(idp_entity_id, 'idpsso_descriptor', 'single_sign_on_service').keys()
except UnknownSystemEntity:
return [] | [
"def",
"get_idp_sso_supported_bindings",
"(",
"idp_entity_id",
"=",
"None",
",",
"config",
"=",
"None",
")",
":",
"if",
"config",
"is",
"None",
":",
"# avoid circular import",
"from",
"djangosaml2",
".",
"conf",
"import",
"get_config",
"config",
"=",
"get_config",
"(",
")",
"# load metadata store from config",
"meta",
"=",
"getattr",
"(",
"config",
",",
"'metadata'",
",",
"{",
"}",
")",
"# if idp is None, assume only one exists so just use that",
"if",
"idp_entity_id",
"is",
"None",
":",
"# .keys() returns dict_keys in python3.5+",
"try",
":",
"idp_entity_id",
"=",
"list",
"(",
"available_idps",
"(",
"config",
")",
".",
"keys",
"(",
")",
")",
"[",
"0",
"]",
"except",
"IndexError",
":",
"raise",
"ImproperlyConfigured",
"(",
"\"No IdP configured!\"",
")",
"try",
":",
"return",
"meta",
".",
"service",
"(",
"idp_entity_id",
",",
"'idpsso_descriptor'",
",",
"'single_sign_on_service'",
")",
".",
"keys",
"(",
")",
"except",
"UnknownSystemEntity",
":",
"return",
"[",
"]"
] | Returns the list of bindings supported by an IDP
This is not clear in the pysaml2 code, so wrapping it in a util | [
"Returns",
"the",
"list",
"of",
"bindings",
"supported",
"by",
"an",
"IDP",
"This",
"is",
"not",
"clear",
"in",
"the",
"pysaml2",
"code",
"so",
"wrapping",
"it",
"in",
"a",
"util"
] | 643969701d3b4257a8d64c5c577602ebaa61de70 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/utils.py#L41-L60 | train | 251,675 |
knaperek/djangosaml2 | djangosaml2/utils.py | fail_acs_response | def fail_acs_response(request, *args, **kwargs):
""" Serves as a common mechanism for ending ACS in case of any SAML related failure.
Handling can be configured by setting the SAML_ACS_FAILURE_RESPONSE_FUNCTION as
suitable for the project.
The default behavior uses SAML specific template that is rendered on any ACS error,
but this can be simply changed so that PermissionDenied exception is raised instead.
"""
failure_function = import_string(get_custom_setting('SAML_ACS_FAILURE_RESPONSE_FUNCTION',
'djangosaml2.acs_failures.template_failure'))
return failure_function(request, *args, **kwargs) | python | def fail_acs_response(request, *args, **kwargs):
""" Serves as a common mechanism for ending ACS in case of any SAML related failure.
Handling can be configured by setting the SAML_ACS_FAILURE_RESPONSE_FUNCTION as
suitable for the project.
The default behavior uses SAML specific template that is rendered on any ACS error,
but this can be simply changed so that PermissionDenied exception is raised instead.
"""
failure_function = import_string(get_custom_setting('SAML_ACS_FAILURE_RESPONSE_FUNCTION',
'djangosaml2.acs_failures.template_failure'))
return failure_function(request, *args, **kwargs) | [
"def",
"fail_acs_response",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"failure_function",
"=",
"import_string",
"(",
"get_custom_setting",
"(",
"'SAML_ACS_FAILURE_RESPONSE_FUNCTION'",
",",
"'djangosaml2.acs_failures.template_failure'",
")",
")",
"return",
"failure_function",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | Serves as a common mechanism for ending ACS in case of any SAML related failure.
Handling can be configured by setting the SAML_ACS_FAILURE_RESPONSE_FUNCTION as
suitable for the project.
The default behavior uses SAML specific template that is rendered on any ACS error,
but this can be simply changed so that PermissionDenied exception is raised instead. | [
"Serves",
"as",
"a",
"common",
"mechanism",
"for",
"ending",
"ACS",
"in",
"case",
"of",
"any",
"SAML",
"related",
"failure",
".",
"Handling",
"can",
"be",
"configured",
"by",
"setting",
"the",
"SAML_ACS_FAILURE_RESPONSE_FUNCTION",
"as",
"suitable",
"for",
"the",
"project",
"."
] | 643969701d3b4257a8d64c5c577602ebaa61de70 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/utils.py#L72-L82 | train | 251,676 |
knaperek/djangosaml2 | djangosaml2/views.py | echo_attributes | def echo_attributes(request,
config_loader_path=None,
template='djangosaml2/echo_attributes.html'):
"""Example view that echo the SAML attributes of an user"""
state = StateCache(request.session)
conf = get_config(config_loader_path, request)
client = Saml2Client(conf, state_cache=state,
identity_cache=IdentityCache(request.session))
subject_id = _get_subject_id(request.session)
try:
identity = client.users.get_identity(subject_id,
check_not_on_or_after=False)
except AttributeError:
return HttpResponse("No active SAML identity found. Are you sure you have logged in via SAML?")
return render(request, template, {'attributes': identity[0]}) | python | def echo_attributes(request,
config_loader_path=None,
template='djangosaml2/echo_attributes.html'):
"""Example view that echo the SAML attributes of an user"""
state = StateCache(request.session)
conf = get_config(config_loader_path, request)
client = Saml2Client(conf, state_cache=state,
identity_cache=IdentityCache(request.session))
subject_id = _get_subject_id(request.session)
try:
identity = client.users.get_identity(subject_id,
check_not_on_or_after=False)
except AttributeError:
return HttpResponse("No active SAML identity found. Are you sure you have logged in via SAML?")
return render(request, template, {'attributes': identity[0]}) | [
"def",
"echo_attributes",
"(",
"request",
",",
"config_loader_path",
"=",
"None",
",",
"template",
"=",
"'djangosaml2/echo_attributes.html'",
")",
":",
"state",
"=",
"StateCache",
"(",
"request",
".",
"session",
")",
"conf",
"=",
"get_config",
"(",
"config_loader_path",
",",
"request",
")",
"client",
"=",
"Saml2Client",
"(",
"conf",
",",
"state_cache",
"=",
"state",
",",
"identity_cache",
"=",
"IdentityCache",
"(",
"request",
".",
"session",
")",
")",
"subject_id",
"=",
"_get_subject_id",
"(",
"request",
".",
"session",
")",
"try",
":",
"identity",
"=",
"client",
".",
"users",
".",
"get_identity",
"(",
"subject_id",
",",
"check_not_on_or_after",
"=",
"False",
")",
"except",
"AttributeError",
":",
"return",
"HttpResponse",
"(",
"\"No active SAML identity found. Are you sure you have logged in via SAML?\"",
")",
"return",
"render",
"(",
"request",
",",
"template",
",",
"{",
"'attributes'",
":",
"identity",
"[",
"0",
"]",
"}",
")"
] | Example view that echo the SAML attributes of an user | [
"Example",
"view",
"that",
"echo",
"the",
"SAML",
"attributes",
"of",
"an",
"user"
] | 643969701d3b4257a8d64c5c577602ebaa61de70 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/views.py#L342-L358 | train | 251,677 |
knaperek/djangosaml2 | djangosaml2/views.py | logout | def logout(request, config_loader_path=None):
"""SAML Logout Request initiator
This view initiates the SAML2 Logout request
using the pysaml2 library to create the LogoutRequest.
"""
state = StateCache(request.session)
conf = get_config(config_loader_path, request)
client = Saml2Client(conf, state_cache=state,
identity_cache=IdentityCache(request.session))
subject_id = _get_subject_id(request.session)
if subject_id is None:
logger.warning(
'The session does not contain the subject id for user %s',
request.user)
result = client.global_logout(subject_id)
state.sync()
if not result:
logger.error("Looks like the user %s is not logged in any IdP/AA", subject_id)
return HttpResponseBadRequest("You are not logged in any IdP/AA")
if len(result) > 1:
logger.error('Sorry, I do not know how to logout from several sources. I will logout just from the first one')
for entityid, logout_info in result.items():
if isinstance(logout_info, tuple):
binding, http_info = logout_info
if binding == BINDING_HTTP_POST:
logger.debug('Returning form to the IdP to continue the logout process')
body = ''.join(http_info['data'])
return HttpResponse(body)
elif binding == BINDING_HTTP_REDIRECT:
logger.debug('Redirecting to the IdP to continue the logout process')
return HttpResponseRedirect(get_location(http_info))
else:
logger.error('Unknown binding: %s', binding)
return HttpResponseServerError('Failed to log out')
else:
# We must have had a soap logout
return finish_logout(request, logout_info)
logger.error('Could not logout because there only the HTTP_REDIRECT is supported')
return HttpResponseServerError('Logout Binding not supported') | python | def logout(request, config_loader_path=None):
"""SAML Logout Request initiator
This view initiates the SAML2 Logout request
using the pysaml2 library to create the LogoutRequest.
"""
state = StateCache(request.session)
conf = get_config(config_loader_path, request)
client = Saml2Client(conf, state_cache=state,
identity_cache=IdentityCache(request.session))
subject_id = _get_subject_id(request.session)
if subject_id is None:
logger.warning(
'The session does not contain the subject id for user %s',
request.user)
result = client.global_logout(subject_id)
state.sync()
if not result:
logger.error("Looks like the user %s is not logged in any IdP/AA", subject_id)
return HttpResponseBadRequest("You are not logged in any IdP/AA")
if len(result) > 1:
logger.error('Sorry, I do not know how to logout from several sources. I will logout just from the first one')
for entityid, logout_info in result.items():
if isinstance(logout_info, tuple):
binding, http_info = logout_info
if binding == BINDING_HTTP_POST:
logger.debug('Returning form to the IdP to continue the logout process')
body = ''.join(http_info['data'])
return HttpResponse(body)
elif binding == BINDING_HTTP_REDIRECT:
logger.debug('Redirecting to the IdP to continue the logout process')
return HttpResponseRedirect(get_location(http_info))
else:
logger.error('Unknown binding: %s', binding)
return HttpResponseServerError('Failed to log out')
else:
# We must have had a soap logout
return finish_logout(request, logout_info)
logger.error('Could not logout because there only the HTTP_REDIRECT is supported')
return HttpResponseServerError('Logout Binding not supported') | [
"def",
"logout",
"(",
"request",
",",
"config_loader_path",
"=",
"None",
")",
":",
"state",
"=",
"StateCache",
"(",
"request",
".",
"session",
")",
"conf",
"=",
"get_config",
"(",
"config_loader_path",
",",
"request",
")",
"client",
"=",
"Saml2Client",
"(",
"conf",
",",
"state_cache",
"=",
"state",
",",
"identity_cache",
"=",
"IdentityCache",
"(",
"request",
".",
"session",
")",
")",
"subject_id",
"=",
"_get_subject_id",
"(",
"request",
".",
"session",
")",
"if",
"subject_id",
"is",
"None",
":",
"logger",
".",
"warning",
"(",
"'The session does not contain the subject id for user %s'",
",",
"request",
".",
"user",
")",
"result",
"=",
"client",
".",
"global_logout",
"(",
"subject_id",
")",
"state",
".",
"sync",
"(",
")",
"if",
"not",
"result",
":",
"logger",
".",
"error",
"(",
"\"Looks like the user %s is not logged in any IdP/AA\"",
",",
"subject_id",
")",
"return",
"HttpResponseBadRequest",
"(",
"\"You are not logged in any IdP/AA\"",
")",
"if",
"len",
"(",
"result",
")",
">",
"1",
":",
"logger",
".",
"error",
"(",
"'Sorry, I do not know how to logout from several sources. I will logout just from the first one'",
")",
"for",
"entityid",
",",
"logout_info",
"in",
"result",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"logout_info",
",",
"tuple",
")",
":",
"binding",
",",
"http_info",
"=",
"logout_info",
"if",
"binding",
"==",
"BINDING_HTTP_POST",
":",
"logger",
".",
"debug",
"(",
"'Returning form to the IdP to continue the logout process'",
")",
"body",
"=",
"''",
".",
"join",
"(",
"http_info",
"[",
"'data'",
"]",
")",
"return",
"HttpResponse",
"(",
"body",
")",
"elif",
"binding",
"==",
"BINDING_HTTP_REDIRECT",
":",
"logger",
".",
"debug",
"(",
"'Redirecting to the IdP to continue the logout process'",
")",
"return",
"HttpResponseRedirect",
"(",
"get_location",
"(",
"http_info",
")",
")",
"else",
":",
"logger",
".",
"error",
"(",
"'Unknown binding: %s'",
",",
"binding",
")",
"return",
"HttpResponseServerError",
"(",
"'Failed to log out'",
")",
"else",
":",
"# We must have had a soap logout",
"return",
"finish_logout",
"(",
"request",
",",
"logout_info",
")",
"logger",
".",
"error",
"(",
"'Could not logout because there only the HTTP_REDIRECT is supported'",
")",
"return",
"HttpResponseServerError",
"(",
"'Logout Binding not supported'",
")"
] | SAML Logout Request initiator
This view initiates the SAML2 Logout request
using the pysaml2 library to create the LogoutRequest. | [
"SAML",
"Logout",
"Request",
"initiator"
] | 643969701d3b4257a8d64c5c577602ebaa61de70 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/views.py#L362-L408 | train | 251,678 |
knaperek/djangosaml2 | djangosaml2/views.py | do_logout_service | def do_logout_service(request, data, binding, config_loader_path=None, next_page=None,
logout_error_template='djangosaml2/logout_error.html'):
"""SAML Logout Response endpoint
The IdP will send the logout response to this view,
which will process it with pysaml2 help and log the user
out.
Note that the IdP can request a logout even when
we didn't initiate the process as a single logout
request started by another SP.
"""
logger.debug('Logout service started')
conf = get_config(config_loader_path, request)
state = StateCache(request.session)
client = Saml2Client(conf, state_cache=state,
identity_cache=IdentityCache(request.session))
if 'SAMLResponse' in data: # we started the logout
logger.debug('Receiving a logout response from the IdP')
response = client.parse_logout_request_response(data['SAMLResponse'], binding)
state.sync()
return finish_logout(request, response, next_page=next_page)
elif 'SAMLRequest' in data: # logout started by the IdP
logger.debug('Receiving a logout request from the IdP')
subject_id = _get_subject_id(request.session)
if subject_id is None:
logger.warning(
'The session does not contain the subject id for user %s. Performing local logout',
request.user)
auth.logout(request)
return render(request, logout_error_template, status=403)
else:
http_info = client.handle_logout_request(
data['SAMLRequest'],
subject_id,
binding,
relay_state=data.get('RelayState', ''))
state.sync()
auth.logout(request)
return HttpResponseRedirect(get_location(http_info))
else:
logger.error('No SAMLResponse or SAMLRequest parameter found')
raise Http404('No SAMLResponse or SAMLRequest parameter found') | python | def do_logout_service(request, data, binding, config_loader_path=None, next_page=None,
logout_error_template='djangosaml2/logout_error.html'):
"""SAML Logout Response endpoint
The IdP will send the logout response to this view,
which will process it with pysaml2 help and log the user
out.
Note that the IdP can request a logout even when
we didn't initiate the process as a single logout
request started by another SP.
"""
logger.debug('Logout service started')
conf = get_config(config_loader_path, request)
state = StateCache(request.session)
client = Saml2Client(conf, state_cache=state,
identity_cache=IdentityCache(request.session))
if 'SAMLResponse' in data: # we started the logout
logger.debug('Receiving a logout response from the IdP')
response = client.parse_logout_request_response(data['SAMLResponse'], binding)
state.sync()
return finish_logout(request, response, next_page=next_page)
elif 'SAMLRequest' in data: # logout started by the IdP
logger.debug('Receiving a logout request from the IdP')
subject_id = _get_subject_id(request.session)
if subject_id is None:
logger.warning(
'The session does not contain the subject id for user %s. Performing local logout',
request.user)
auth.logout(request)
return render(request, logout_error_template, status=403)
else:
http_info = client.handle_logout_request(
data['SAMLRequest'],
subject_id,
binding,
relay_state=data.get('RelayState', ''))
state.sync()
auth.logout(request)
return HttpResponseRedirect(get_location(http_info))
else:
logger.error('No SAMLResponse or SAMLRequest parameter found')
raise Http404('No SAMLResponse or SAMLRequest parameter found') | [
"def",
"do_logout_service",
"(",
"request",
",",
"data",
",",
"binding",
",",
"config_loader_path",
"=",
"None",
",",
"next_page",
"=",
"None",
",",
"logout_error_template",
"=",
"'djangosaml2/logout_error.html'",
")",
":",
"logger",
".",
"debug",
"(",
"'Logout service started'",
")",
"conf",
"=",
"get_config",
"(",
"config_loader_path",
",",
"request",
")",
"state",
"=",
"StateCache",
"(",
"request",
".",
"session",
")",
"client",
"=",
"Saml2Client",
"(",
"conf",
",",
"state_cache",
"=",
"state",
",",
"identity_cache",
"=",
"IdentityCache",
"(",
"request",
".",
"session",
")",
")",
"if",
"'SAMLResponse'",
"in",
"data",
":",
"# we started the logout",
"logger",
".",
"debug",
"(",
"'Receiving a logout response from the IdP'",
")",
"response",
"=",
"client",
".",
"parse_logout_request_response",
"(",
"data",
"[",
"'SAMLResponse'",
"]",
",",
"binding",
")",
"state",
".",
"sync",
"(",
")",
"return",
"finish_logout",
"(",
"request",
",",
"response",
",",
"next_page",
"=",
"next_page",
")",
"elif",
"'SAMLRequest'",
"in",
"data",
":",
"# logout started by the IdP",
"logger",
".",
"debug",
"(",
"'Receiving a logout request from the IdP'",
")",
"subject_id",
"=",
"_get_subject_id",
"(",
"request",
".",
"session",
")",
"if",
"subject_id",
"is",
"None",
":",
"logger",
".",
"warning",
"(",
"'The session does not contain the subject id for user %s. Performing local logout'",
",",
"request",
".",
"user",
")",
"auth",
".",
"logout",
"(",
"request",
")",
"return",
"render",
"(",
"request",
",",
"logout_error_template",
",",
"status",
"=",
"403",
")",
"else",
":",
"http_info",
"=",
"client",
".",
"handle_logout_request",
"(",
"data",
"[",
"'SAMLRequest'",
"]",
",",
"subject_id",
",",
"binding",
",",
"relay_state",
"=",
"data",
".",
"get",
"(",
"'RelayState'",
",",
"''",
")",
")",
"state",
".",
"sync",
"(",
")",
"auth",
".",
"logout",
"(",
"request",
")",
"return",
"HttpResponseRedirect",
"(",
"get_location",
"(",
"http_info",
")",
")",
"else",
":",
"logger",
".",
"error",
"(",
"'No SAMLResponse or SAMLRequest parameter found'",
")",
"raise",
"Http404",
"(",
"'No SAMLResponse or SAMLRequest parameter found'",
")"
] | SAML Logout Response endpoint
The IdP will send the logout response to this view,
which will process it with pysaml2 help and log the user
out.
Note that the IdP can request a logout even when
we didn't initiate the process as a single logout
request started by another SP. | [
"SAML",
"Logout",
"Response",
"endpoint"
] | 643969701d3b4257a8d64c5c577602ebaa61de70 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/views.py#L420-L464 | train | 251,679 |
knaperek/djangosaml2 | djangosaml2/views.py | metadata | def metadata(request, config_loader_path=None, valid_for=None):
"""Returns an XML with the SAML 2.0 metadata for this
SP as configured in the settings.py file.
"""
conf = get_config(config_loader_path, request)
metadata = entity_descriptor(conf)
return HttpResponse(content=text_type(metadata).encode('utf-8'),
content_type="text/xml; charset=utf8") | python | def metadata(request, config_loader_path=None, valid_for=None):
"""Returns an XML with the SAML 2.0 metadata for this
SP as configured in the settings.py file.
"""
conf = get_config(config_loader_path, request)
metadata = entity_descriptor(conf)
return HttpResponse(content=text_type(metadata).encode('utf-8'),
content_type="text/xml; charset=utf8") | [
"def",
"metadata",
"(",
"request",
",",
"config_loader_path",
"=",
"None",
",",
"valid_for",
"=",
"None",
")",
":",
"conf",
"=",
"get_config",
"(",
"config_loader_path",
",",
"request",
")",
"metadata",
"=",
"entity_descriptor",
"(",
"conf",
")",
"return",
"HttpResponse",
"(",
"content",
"=",
"text_type",
"(",
"metadata",
")",
".",
"encode",
"(",
"'utf-8'",
")",
",",
"content_type",
"=",
"\"text/xml; charset=utf8\"",
")"
] | Returns an XML with the SAML 2.0 metadata for this
SP as configured in the settings.py file. | [
"Returns",
"an",
"XML",
"with",
"the",
"SAML",
"2",
".",
"0",
"metadata",
"for",
"this",
"SP",
"as",
"configured",
"in",
"the",
"settings",
".",
"py",
"file",
"."
] | 643969701d3b4257a8d64c5c577602ebaa61de70 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/views.py#L479-L486 | train | 251,680 |
knaperek/djangosaml2 | djangosaml2/backends.py | Saml2Backend.configure_user | def configure_user(self, user, attributes, attribute_mapping):
"""Configures a user after creation and returns the updated user.
By default, returns the user with his attributes updated.
"""
user.set_unusable_password()
return self.update_user(user, attributes, attribute_mapping,
force_save=True) | python | def configure_user(self, user, attributes, attribute_mapping):
"""Configures a user after creation and returns the updated user.
By default, returns the user with his attributes updated.
"""
user.set_unusable_password()
return self.update_user(user, attributes, attribute_mapping,
force_save=True) | [
"def",
"configure_user",
"(",
"self",
",",
"user",
",",
"attributes",
",",
"attribute_mapping",
")",
":",
"user",
".",
"set_unusable_password",
"(",
")",
"return",
"self",
".",
"update_user",
"(",
"user",
",",
"attributes",
",",
"attribute_mapping",
",",
"force_save",
"=",
"True",
")"
] | Configures a user after creation and returns the updated user.
By default, returns the user with his attributes updated. | [
"Configures",
"a",
"user",
"after",
"creation",
"and",
"returns",
"the",
"updated",
"user",
"."
] | 643969701d3b4257a8d64c5c577602ebaa61de70 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/backends.py#L198-L205 | train | 251,681 |
knaperek/djangosaml2 | djangosaml2/backends.py | Saml2Backend.update_user | def update_user(self, user, attributes, attribute_mapping,
force_save=False):
"""Update a user with a set of attributes and returns the updated user.
By default it uses a mapping defined in the settings constant
SAML_ATTRIBUTE_MAPPING. For each attribute, if the user object has
that field defined it will be set.
"""
if not attribute_mapping:
return user
user_modified = False
for saml_attr, django_attrs in attribute_mapping.items():
attr_value_list = attributes.get(saml_attr)
if not attr_value_list:
logger.debug(
'Could not find value for "%s", not updating fields "%s"',
saml_attr, django_attrs)
continue
for attr in django_attrs:
if hasattr(user, attr):
user_attr = getattr(user, attr)
if callable(user_attr):
modified = user_attr(attr_value_list)
else:
modified = self._set_attribute(user, attr, attr_value_list[0])
user_modified = user_modified or modified
else:
logger.debug(
'Could not find attribute "%s" on user "%s"', attr, user)
logger.debug('Sending the pre_save signal')
signal_modified = any(
[response for receiver, response
in pre_user_save.send_robust(sender=user.__class__,
instance=user,
attributes=attributes,
user_modified=user_modified)]
)
if user_modified or signal_modified or force_save:
user.save()
return user | python | def update_user(self, user, attributes, attribute_mapping,
force_save=False):
"""Update a user with a set of attributes and returns the updated user.
By default it uses a mapping defined in the settings constant
SAML_ATTRIBUTE_MAPPING. For each attribute, if the user object has
that field defined it will be set.
"""
if not attribute_mapping:
return user
user_modified = False
for saml_attr, django_attrs in attribute_mapping.items():
attr_value_list = attributes.get(saml_attr)
if not attr_value_list:
logger.debug(
'Could not find value for "%s", not updating fields "%s"',
saml_attr, django_attrs)
continue
for attr in django_attrs:
if hasattr(user, attr):
user_attr = getattr(user, attr)
if callable(user_attr):
modified = user_attr(attr_value_list)
else:
modified = self._set_attribute(user, attr, attr_value_list[0])
user_modified = user_modified or modified
else:
logger.debug(
'Could not find attribute "%s" on user "%s"', attr, user)
logger.debug('Sending the pre_save signal')
signal_modified = any(
[response for receiver, response
in pre_user_save.send_robust(sender=user.__class__,
instance=user,
attributes=attributes,
user_modified=user_modified)]
)
if user_modified or signal_modified or force_save:
user.save()
return user | [
"def",
"update_user",
"(",
"self",
",",
"user",
",",
"attributes",
",",
"attribute_mapping",
",",
"force_save",
"=",
"False",
")",
":",
"if",
"not",
"attribute_mapping",
":",
"return",
"user",
"user_modified",
"=",
"False",
"for",
"saml_attr",
",",
"django_attrs",
"in",
"attribute_mapping",
".",
"items",
"(",
")",
":",
"attr_value_list",
"=",
"attributes",
".",
"get",
"(",
"saml_attr",
")",
"if",
"not",
"attr_value_list",
":",
"logger",
".",
"debug",
"(",
"'Could not find value for \"%s\", not updating fields \"%s\"'",
",",
"saml_attr",
",",
"django_attrs",
")",
"continue",
"for",
"attr",
"in",
"django_attrs",
":",
"if",
"hasattr",
"(",
"user",
",",
"attr",
")",
":",
"user_attr",
"=",
"getattr",
"(",
"user",
",",
"attr",
")",
"if",
"callable",
"(",
"user_attr",
")",
":",
"modified",
"=",
"user_attr",
"(",
"attr_value_list",
")",
"else",
":",
"modified",
"=",
"self",
".",
"_set_attribute",
"(",
"user",
",",
"attr",
",",
"attr_value_list",
"[",
"0",
"]",
")",
"user_modified",
"=",
"user_modified",
"or",
"modified",
"else",
":",
"logger",
".",
"debug",
"(",
"'Could not find attribute \"%s\" on user \"%s\"'",
",",
"attr",
",",
"user",
")",
"logger",
".",
"debug",
"(",
"'Sending the pre_save signal'",
")",
"signal_modified",
"=",
"any",
"(",
"[",
"response",
"for",
"receiver",
",",
"response",
"in",
"pre_user_save",
".",
"send_robust",
"(",
"sender",
"=",
"user",
".",
"__class__",
",",
"instance",
"=",
"user",
",",
"attributes",
"=",
"attributes",
",",
"user_modified",
"=",
"user_modified",
")",
"]",
")",
"if",
"user_modified",
"or",
"signal_modified",
"or",
"force_save",
":",
"user",
".",
"save",
"(",
")",
"return",
"user"
] | Update a user with a set of attributes and returns the updated user.
By default it uses a mapping defined in the settings constant
SAML_ATTRIBUTE_MAPPING. For each attribute, if the user object has
that field defined it will be set. | [
"Update",
"a",
"user",
"with",
"a",
"set",
"of",
"attributes",
"and",
"returns",
"the",
"updated",
"user",
"."
] | 643969701d3b4257a8d64c5c577602ebaa61de70 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/backends.py#L207-L252 | train | 251,682 |
knaperek/djangosaml2 | djangosaml2/backends.py | Saml2Backend._set_attribute | def _set_attribute(self, obj, attr, value):
"""Set an attribute of an object to a specific value.
Return True if the attribute was changed and False otherwise.
"""
field = obj._meta.get_field(attr)
if field.max_length is not None and len(value) > field.max_length:
cleaned_value = value[:field.max_length]
logger.warn('The attribute "%s" was trimmed from "%s" to "%s"',
attr, value, cleaned_value)
else:
cleaned_value = value
old_value = getattr(obj, attr)
if cleaned_value != old_value:
setattr(obj, attr, cleaned_value)
return True
return False | python | def _set_attribute(self, obj, attr, value):
"""Set an attribute of an object to a specific value.
Return True if the attribute was changed and False otherwise.
"""
field = obj._meta.get_field(attr)
if field.max_length is not None and len(value) > field.max_length:
cleaned_value = value[:field.max_length]
logger.warn('The attribute "%s" was trimmed from "%s" to "%s"',
attr, value, cleaned_value)
else:
cleaned_value = value
old_value = getattr(obj, attr)
if cleaned_value != old_value:
setattr(obj, attr, cleaned_value)
return True
return False | [
"def",
"_set_attribute",
"(",
"self",
",",
"obj",
",",
"attr",
",",
"value",
")",
":",
"field",
"=",
"obj",
".",
"_meta",
".",
"get_field",
"(",
"attr",
")",
"if",
"field",
".",
"max_length",
"is",
"not",
"None",
"and",
"len",
"(",
"value",
")",
">",
"field",
".",
"max_length",
":",
"cleaned_value",
"=",
"value",
"[",
":",
"field",
".",
"max_length",
"]",
"logger",
".",
"warn",
"(",
"'The attribute \"%s\" was trimmed from \"%s\" to \"%s\"'",
",",
"attr",
",",
"value",
",",
"cleaned_value",
")",
"else",
":",
"cleaned_value",
"=",
"value",
"old_value",
"=",
"getattr",
"(",
"obj",
",",
"attr",
")",
"if",
"cleaned_value",
"!=",
"old_value",
":",
"setattr",
"(",
"obj",
",",
"attr",
",",
"cleaned_value",
")",
"return",
"True",
"return",
"False"
] | Set an attribute of an object to a specific value.
Return True if the attribute was changed and False otherwise. | [
"Set",
"an",
"attribute",
"of",
"an",
"object",
"to",
"a",
"specific",
"value",
"."
] | 643969701d3b4257a8d64c5c577602ebaa61de70 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/backends.py#L254-L272 | train | 251,683 |
knaperek/djangosaml2 | djangosaml2/conf.py | config_settings_loader | def config_settings_loader(request=None):
"""Utility function to load the pysaml2 configuration.
This is also the default config loader.
"""
conf = SPConfig()
conf.load(copy.deepcopy(settings.SAML_CONFIG))
return conf | python | def config_settings_loader(request=None):
"""Utility function to load the pysaml2 configuration.
This is also the default config loader.
"""
conf = SPConfig()
conf.load(copy.deepcopy(settings.SAML_CONFIG))
return conf | [
"def",
"config_settings_loader",
"(",
"request",
"=",
"None",
")",
":",
"conf",
"=",
"SPConfig",
"(",
")",
"conf",
".",
"load",
"(",
"copy",
".",
"deepcopy",
"(",
"settings",
".",
"SAML_CONFIG",
")",
")",
"return",
"conf"
] | Utility function to load the pysaml2 configuration.
This is also the default config loader. | [
"Utility",
"function",
"to",
"load",
"the",
"pysaml2",
"configuration",
"."
] | 643969701d3b4257a8d64c5c577602ebaa61de70 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/conf.py#L55-L62 | train | 251,684 |
basho/riak-python-client | riak/transports/http/resources.py | mkpath | def mkpath(*segments, **query):
"""
Constructs the path & query portion of a URI from path segments
and a dict.
"""
# Remove empty segments (e.g. no key specified)
segments = [bytes_to_str(s) for s in segments if s is not None]
# Join the segments into a path
pathstring = '/'.join(segments)
# Remove extra slashes
pathstring = re.sub('/+', '/', pathstring)
# Add the query string if it exists
_query = {}
for key in query:
if query[key] in [False, True]:
_query[key] = str(query[key]).lower()
elif query[key] is not None:
if PY2 and isinstance(query[key], unicode): # noqa
_query[key] = query[key].encode('utf-8')
else:
_query[key] = query[key]
if len(_query) > 0:
pathstring += "?" + urlencode(_query)
if not pathstring.startswith('/'):
pathstring = '/' + pathstring
return pathstring | python | def mkpath(*segments, **query):
"""
Constructs the path & query portion of a URI from path segments
and a dict.
"""
# Remove empty segments (e.g. no key specified)
segments = [bytes_to_str(s) for s in segments if s is not None]
# Join the segments into a path
pathstring = '/'.join(segments)
# Remove extra slashes
pathstring = re.sub('/+', '/', pathstring)
# Add the query string if it exists
_query = {}
for key in query:
if query[key] in [False, True]:
_query[key] = str(query[key]).lower()
elif query[key] is not None:
if PY2 and isinstance(query[key], unicode): # noqa
_query[key] = query[key].encode('utf-8')
else:
_query[key] = query[key]
if len(_query) > 0:
pathstring += "?" + urlencode(_query)
if not pathstring.startswith('/'):
pathstring = '/' + pathstring
return pathstring | [
"def",
"mkpath",
"(",
"*",
"segments",
",",
"*",
"*",
"query",
")",
":",
"# Remove empty segments (e.g. no key specified)",
"segments",
"=",
"[",
"bytes_to_str",
"(",
"s",
")",
"for",
"s",
"in",
"segments",
"if",
"s",
"is",
"not",
"None",
"]",
"# Join the segments into a path",
"pathstring",
"=",
"'/'",
".",
"join",
"(",
"segments",
")",
"# Remove extra slashes",
"pathstring",
"=",
"re",
".",
"sub",
"(",
"'/+'",
",",
"'/'",
",",
"pathstring",
")",
"# Add the query string if it exists",
"_query",
"=",
"{",
"}",
"for",
"key",
"in",
"query",
":",
"if",
"query",
"[",
"key",
"]",
"in",
"[",
"False",
",",
"True",
"]",
":",
"_query",
"[",
"key",
"]",
"=",
"str",
"(",
"query",
"[",
"key",
"]",
")",
".",
"lower",
"(",
")",
"elif",
"query",
"[",
"key",
"]",
"is",
"not",
"None",
":",
"if",
"PY2",
"and",
"isinstance",
"(",
"query",
"[",
"key",
"]",
",",
"unicode",
")",
":",
"# noqa",
"_query",
"[",
"key",
"]",
"=",
"query",
"[",
"key",
"]",
".",
"encode",
"(",
"'utf-8'",
")",
"else",
":",
"_query",
"[",
"key",
"]",
"=",
"query",
"[",
"key",
"]",
"if",
"len",
"(",
"_query",
")",
">",
"0",
":",
"pathstring",
"+=",
"\"?\"",
"+",
"urlencode",
"(",
"_query",
")",
"if",
"not",
"pathstring",
".",
"startswith",
"(",
"'/'",
")",
":",
"pathstring",
"=",
"'/'",
"+",
"pathstring",
"return",
"pathstring"
] | Constructs the path & query portion of a URI from path segments
and a dict. | [
"Constructs",
"the",
"path",
"&",
"query",
"portion",
"of",
"a",
"URI",
"from",
"path",
"segments",
"and",
"a",
"dict",
"."
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/resources.py#L275-L304 | train | 251,685 |
basho/riak-python-client | riak/transports/http/resources.py | HttpResources.search_index_path | def search_index_path(self, index=None, **options):
"""
Builds a Yokozuna search index URL.
:param index: optional name of a yz index
:type index: string
:param options: optional list of additional arguments
:type index: dict
:rtype URL string
"""
if not self.yz_wm_index:
raise RiakError("Yokozuna search is unsupported by this Riak node")
if index:
quote_plus(index)
return mkpath(self.yz_wm_index, "index", index, **options) | python | def search_index_path(self, index=None, **options):
"""
Builds a Yokozuna search index URL.
:param index: optional name of a yz index
:type index: string
:param options: optional list of additional arguments
:type index: dict
:rtype URL string
"""
if not self.yz_wm_index:
raise RiakError("Yokozuna search is unsupported by this Riak node")
if index:
quote_plus(index)
return mkpath(self.yz_wm_index, "index", index, **options) | [
"def",
"search_index_path",
"(",
"self",
",",
"index",
"=",
"None",
",",
"*",
"*",
"options",
")",
":",
"if",
"not",
"self",
".",
"yz_wm_index",
":",
"raise",
"RiakError",
"(",
"\"Yokozuna search is unsupported by this Riak node\"",
")",
"if",
"index",
":",
"quote_plus",
"(",
"index",
")",
"return",
"mkpath",
"(",
"self",
".",
"yz_wm_index",
",",
"\"index\"",
",",
"index",
",",
"*",
"*",
"options",
")"
] | Builds a Yokozuna search index URL.
:param index: optional name of a yz index
:type index: string
:param options: optional list of additional arguments
:type index: dict
:rtype URL string | [
"Builds",
"a",
"Yokozuna",
"search",
"index",
"URL",
"."
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/resources.py#L110-L124 | train | 251,686 |
basho/riak-python-client | riak/transports/http/resources.py | HttpResources.search_schema_path | def search_schema_path(self, index, **options):
"""
Builds a Yokozuna search Solr schema URL.
:param index: a name of a yz solr schema
:type index: string
:param options: optional list of additional arguments
:type index: dict
:rtype URL string
"""
if not self.yz_wm_schema:
raise RiakError("Yokozuna search is unsupported by this Riak node")
return mkpath(self.yz_wm_schema, "schema", quote_plus(index),
**options) | python | def search_schema_path(self, index, **options):
"""
Builds a Yokozuna search Solr schema URL.
:param index: a name of a yz solr schema
:type index: string
:param options: optional list of additional arguments
:type index: dict
:rtype URL string
"""
if not self.yz_wm_schema:
raise RiakError("Yokozuna search is unsupported by this Riak node")
return mkpath(self.yz_wm_schema, "schema", quote_plus(index),
**options) | [
"def",
"search_schema_path",
"(",
"self",
",",
"index",
",",
"*",
"*",
"options",
")",
":",
"if",
"not",
"self",
".",
"yz_wm_schema",
":",
"raise",
"RiakError",
"(",
"\"Yokozuna search is unsupported by this Riak node\"",
")",
"return",
"mkpath",
"(",
"self",
".",
"yz_wm_schema",
",",
"\"schema\"",
",",
"quote_plus",
"(",
"index",
")",
",",
"*",
"*",
"options",
")"
] | Builds a Yokozuna search Solr schema URL.
:param index: a name of a yz solr schema
:type index: string
:param options: optional list of additional arguments
:type index: dict
:rtype URL string | [
"Builds",
"a",
"Yokozuna",
"search",
"Solr",
"schema",
"URL",
"."
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/resources.py#L126-L139 | train | 251,687 |
basho/riak-python-client | riak/datatypes/hll.py | Hll.to_op | def to_op(self):
"""
Extracts the modification operation from the Hll.
:rtype: dict, None
"""
if not self._adds:
return None
changes = {}
if self._adds:
changes['adds'] = list(self._adds)
return changes | python | def to_op(self):
"""
Extracts the modification operation from the Hll.
:rtype: dict, None
"""
if not self._adds:
return None
changes = {}
if self._adds:
changes['adds'] = list(self._adds)
return changes | [
"def",
"to_op",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_adds",
":",
"return",
"None",
"changes",
"=",
"{",
"}",
"if",
"self",
".",
"_adds",
":",
"changes",
"[",
"'adds'",
"]",
"=",
"list",
"(",
"self",
".",
"_adds",
")",
"return",
"changes"
] | Extracts the modification operation from the Hll.
:rtype: dict, None | [
"Extracts",
"the",
"modification",
"operation",
"from",
"the",
"Hll",
"."
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/hll.py#L49-L60 | train | 251,688 |
basho/riak-python-client | riak/datatypes/hll.py | Hll.add | def add(self, element):
"""
Adds an element to the HyperLogLog. Datatype cardinality will
be updated when the object is saved.
:param element: the element to add
:type element: str
"""
if not isinstance(element, six.string_types):
raise TypeError("Hll elements can only be strings")
self._adds.add(element) | python | def add(self, element):
"""
Adds an element to the HyperLogLog. Datatype cardinality will
be updated when the object is saved.
:param element: the element to add
:type element: str
"""
if not isinstance(element, six.string_types):
raise TypeError("Hll elements can only be strings")
self._adds.add(element) | [
"def",
"add",
"(",
"self",
",",
"element",
")",
":",
"if",
"not",
"isinstance",
"(",
"element",
",",
"six",
".",
"string_types",
")",
":",
"raise",
"TypeError",
"(",
"\"Hll elements can only be strings\"",
")",
"self",
".",
"_adds",
".",
"add",
"(",
"element",
")"
] | Adds an element to the HyperLogLog. Datatype cardinality will
be updated when the object is saved.
:param element: the element to add
:type element: str | [
"Adds",
"an",
"element",
"to",
"the",
"HyperLogLog",
".",
"Datatype",
"cardinality",
"will",
"be",
"updated",
"when",
"the",
"object",
"is",
"saved",
"."
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/hll.py#L62-L72 | train | 251,689 |
basho/riak-python-client | riak/transports/http/transport.py | HttpTransport.ping | def ping(self):
"""
Check server is alive over HTTP
"""
status, _, body = self._request('GET', self.ping_path())
return(status is not None) and (bytes_to_str(body) == 'OK') | python | def ping(self):
"""
Check server is alive over HTTP
"""
status, _, body = self._request('GET', self.ping_path())
return(status is not None) and (bytes_to_str(body) == 'OK') | [
"def",
"ping",
"(",
"self",
")",
":",
"status",
",",
"_",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"self",
".",
"ping_path",
"(",
")",
")",
"return",
"(",
"status",
"is",
"not",
"None",
")",
"and",
"(",
"bytes_to_str",
"(",
"body",
")",
"==",
"'OK'",
")"
] | Check server is alive over HTTP | [
"Check",
"server",
"is",
"alive",
"over",
"HTTP"
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L68-L73 | train | 251,690 |
basho/riak-python-client | riak/transports/http/transport.py | HttpTransport.stats | def stats(self):
"""
Gets performance statistics and server information
"""
status, _, body = self._request('GET', self.stats_path(),
{'Accept': 'application/json'})
if status == 200:
return json.loads(bytes_to_str(body))
else:
return None | python | def stats(self):
"""
Gets performance statistics and server information
"""
status, _, body = self._request('GET', self.stats_path(),
{'Accept': 'application/json'})
if status == 200:
return json.loads(bytes_to_str(body))
else:
return None | [
"def",
"stats",
"(",
"self",
")",
":",
"status",
",",
"_",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"self",
".",
"stats_path",
"(",
")",
",",
"{",
"'Accept'",
":",
"'application/json'",
"}",
")",
"if",
"status",
"==",
"200",
":",
"return",
"json",
".",
"loads",
"(",
"bytes_to_str",
"(",
"body",
")",
")",
"else",
":",
"return",
"None"
] | Gets performance statistics and server information | [
"Gets",
"performance",
"statistics",
"and",
"server",
"information"
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L75-L84 | train | 251,691 |
basho/riak-python-client | riak/transports/http/transport.py | HttpTransport.get_keys | def get_keys(self, bucket, timeout=None):
"""
Fetch a list of keys for the bucket
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.key_list_path(bucket.name, bucket_type=bucket_type,
timeout=timeout)
status, _, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['keys']
else:
raise RiakError('Error listing keys.') | python | def get_keys(self, bucket, timeout=None):
"""
Fetch a list of keys for the bucket
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.key_list_path(bucket.name, bucket_type=bucket_type,
timeout=timeout)
status, _, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['keys']
else:
raise RiakError('Error listing keys.') | [
"def",
"get_keys",
"(",
"self",
",",
"bucket",
",",
"timeout",
"=",
"None",
")",
":",
"bucket_type",
"=",
"self",
".",
"_get_bucket_type",
"(",
"bucket",
".",
"bucket_type",
")",
"url",
"=",
"self",
".",
"key_list_path",
"(",
"bucket",
".",
"name",
",",
"bucket_type",
"=",
"bucket_type",
",",
"timeout",
"=",
"timeout",
")",
"status",
",",
"_",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"url",
")",
"if",
"status",
"==",
"200",
":",
"props",
"=",
"json",
".",
"loads",
"(",
"bytes_to_str",
"(",
"body",
")",
")",
"return",
"props",
"[",
"'keys'",
"]",
"else",
":",
"raise",
"RiakError",
"(",
"'Error listing keys.'",
")"
] | Fetch a list of keys for the bucket | [
"Fetch",
"a",
"list",
"of",
"keys",
"for",
"the",
"bucket"
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L199-L212 | train | 251,692 |
basho/riak-python-client | riak/transports/http/transport.py | HttpTransport.get_buckets | def get_buckets(self, bucket_type=None, timeout=None):
"""
Fetch a list of all buckets
"""
bucket_type = self._get_bucket_type(bucket_type)
url = self.bucket_list_path(bucket_type=bucket_type,
timeout=timeout)
status, headers, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['buckets']
else:
raise RiakError('Error getting buckets.') | python | def get_buckets(self, bucket_type=None, timeout=None):
"""
Fetch a list of all buckets
"""
bucket_type = self._get_bucket_type(bucket_type)
url = self.bucket_list_path(bucket_type=bucket_type,
timeout=timeout)
status, headers, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['buckets']
else:
raise RiakError('Error getting buckets.') | [
"def",
"get_buckets",
"(",
"self",
",",
"bucket_type",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"bucket_type",
"=",
"self",
".",
"_get_bucket_type",
"(",
"bucket_type",
")",
"url",
"=",
"self",
".",
"bucket_list_path",
"(",
"bucket_type",
"=",
"bucket_type",
",",
"timeout",
"=",
"timeout",
")",
"status",
",",
"headers",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"url",
")",
"if",
"status",
"==",
"200",
":",
"props",
"=",
"json",
".",
"loads",
"(",
"bytes_to_str",
"(",
"body",
")",
")",
"return",
"props",
"[",
"'buckets'",
"]",
"else",
":",
"raise",
"RiakError",
"(",
"'Error getting buckets.'",
")"
] | Fetch a list of all buckets | [
"Fetch",
"a",
"list",
"of",
"all",
"buckets"
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L225-L238 | train | 251,693 |
basho/riak-python-client | riak/transports/http/transport.py | HttpTransport.get_bucket_props | def get_bucket_props(self, bucket):
"""
Get properties for a bucket
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.bucket_properties_path(bucket.name,
bucket_type=bucket_type)
status, headers, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['props']
else:
raise RiakError('Error getting bucket properties.') | python | def get_bucket_props(self, bucket):
"""
Get properties for a bucket
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.bucket_properties_path(bucket.name,
bucket_type=bucket_type)
status, headers, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['props']
else:
raise RiakError('Error getting bucket properties.') | [
"def",
"get_bucket_props",
"(",
"self",
",",
"bucket",
")",
":",
"bucket_type",
"=",
"self",
".",
"_get_bucket_type",
"(",
"bucket",
".",
"bucket_type",
")",
"url",
"=",
"self",
".",
"bucket_properties_path",
"(",
"bucket",
".",
"name",
",",
"bucket_type",
"=",
"bucket_type",
")",
"status",
",",
"headers",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"url",
")",
"if",
"status",
"==",
"200",
":",
"props",
"=",
"json",
".",
"loads",
"(",
"bytes_to_str",
"(",
"body",
")",
")",
"return",
"props",
"[",
"'props'",
"]",
"else",
":",
"raise",
"RiakError",
"(",
"'Error getting bucket properties.'",
")"
] | Get properties for a bucket | [
"Get",
"properties",
"for",
"a",
"bucket"
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L258-L271 | train | 251,694 |
basho/riak-python-client | riak/transports/http/transport.py | HttpTransport.set_bucket_props | def set_bucket_props(self, bucket, props):
"""
Set the properties on the bucket object given
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.bucket_properties_path(bucket.name,
bucket_type=bucket_type)
headers = {'Content-Type': 'application/json'}
content = json.dumps({'props': props})
# Run the request...
status, _, body = self._request('PUT', url, headers, content)
if status == 401:
raise SecurityError('Not authorized to set bucket properties.')
elif status != 204:
raise RiakError('Error setting bucket properties.')
return True | python | def set_bucket_props(self, bucket, props):
"""
Set the properties on the bucket object given
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.bucket_properties_path(bucket.name,
bucket_type=bucket_type)
headers = {'Content-Type': 'application/json'}
content = json.dumps({'props': props})
# Run the request...
status, _, body = self._request('PUT', url, headers, content)
if status == 401:
raise SecurityError('Not authorized to set bucket properties.')
elif status != 204:
raise RiakError('Error setting bucket properties.')
return True | [
"def",
"set_bucket_props",
"(",
"self",
",",
"bucket",
",",
"props",
")",
":",
"bucket_type",
"=",
"self",
".",
"_get_bucket_type",
"(",
"bucket",
".",
"bucket_type",
")",
"url",
"=",
"self",
".",
"bucket_properties_path",
"(",
"bucket",
".",
"name",
",",
"bucket_type",
"=",
"bucket_type",
")",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
"content",
"=",
"json",
".",
"dumps",
"(",
"{",
"'props'",
":",
"props",
"}",
")",
"# Run the request...",
"status",
",",
"_",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'PUT'",
",",
"url",
",",
"headers",
",",
"content",
")",
"if",
"status",
"==",
"401",
":",
"raise",
"SecurityError",
"(",
"'Not authorized to set bucket properties.'",
")",
"elif",
"status",
"!=",
"204",
":",
"raise",
"RiakError",
"(",
"'Error setting bucket properties.'",
")",
"return",
"True"
] | Set the properties on the bucket object given | [
"Set",
"the",
"properties",
"on",
"the",
"bucket",
"object",
"given"
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L273-L290 | train | 251,695 |
basho/riak-python-client | riak/transports/http/transport.py | HttpTransport.clear_bucket_props | def clear_bucket_props(self, bucket):
"""
reset the properties on the bucket object given
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.bucket_properties_path(bucket.name,
bucket_type=bucket_type)
url = self.bucket_properties_path(bucket.name)
headers = {'Content-Type': 'application/json'}
# Run the request...
status, _, _ = self._request('DELETE', url, headers, None)
if status == 204:
return True
elif status == 405:
return False
else:
raise RiakError('Error %s clearing bucket properties.'
% status) | python | def clear_bucket_props(self, bucket):
"""
reset the properties on the bucket object given
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.bucket_properties_path(bucket.name,
bucket_type=bucket_type)
url = self.bucket_properties_path(bucket.name)
headers = {'Content-Type': 'application/json'}
# Run the request...
status, _, _ = self._request('DELETE', url, headers, None)
if status == 204:
return True
elif status == 405:
return False
else:
raise RiakError('Error %s clearing bucket properties.'
% status) | [
"def",
"clear_bucket_props",
"(",
"self",
",",
"bucket",
")",
":",
"bucket_type",
"=",
"self",
".",
"_get_bucket_type",
"(",
"bucket",
".",
"bucket_type",
")",
"url",
"=",
"self",
".",
"bucket_properties_path",
"(",
"bucket",
".",
"name",
",",
"bucket_type",
"=",
"bucket_type",
")",
"url",
"=",
"self",
".",
"bucket_properties_path",
"(",
"bucket",
".",
"name",
")",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
"# Run the request...",
"status",
",",
"_",
",",
"_",
"=",
"self",
".",
"_request",
"(",
"'DELETE'",
",",
"url",
",",
"headers",
",",
"None",
")",
"if",
"status",
"==",
"204",
":",
"return",
"True",
"elif",
"status",
"==",
"405",
":",
"return",
"False",
"else",
":",
"raise",
"RiakError",
"(",
"'Error %s clearing bucket properties.'",
"%",
"status",
")"
] | reset the properties on the bucket object given | [
"reset",
"the",
"properties",
"on",
"the",
"bucket",
"object",
"given"
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L292-L311 | train | 251,696 |
basho/riak-python-client | riak/transports/http/transport.py | HttpTransport.get_bucket_type_props | def get_bucket_type_props(self, bucket_type):
"""
Get properties for a bucket-type
"""
self._check_bucket_types(bucket_type)
url = self.bucket_type_properties_path(bucket_type.name)
status, headers, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['props']
else:
raise RiakError('Error getting bucket-type properties.') | python | def get_bucket_type_props(self, bucket_type):
"""
Get properties for a bucket-type
"""
self._check_bucket_types(bucket_type)
url = self.bucket_type_properties_path(bucket_type.name)
status, headers, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['props']
else:
raise RiakError('Error getting bucket-type properties.') | [
"def",
"get_bucket_type_props",
"(",
"self",
",",
"bucket_type",
")",
":",
"self",
".",
"_check_bucket_types",
"(",
"bucket_type",
")",
"url",
"=",
"self",
".",
"bucket_type_properties_path",
"(",
"bucket_type",
".",
"name",
")",
"status",
",",
"headers",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"url",
")",
"if",
"status",
"==",
"200",
":",
"props",
"=",
"json",
".",
"loads",
"(",
"bytes_to_str",
"(",
"body",
")",
")",
"return",
"props",
"[",
"'props'",
"]",
"else",
":",
"raise",
"RiakError",
"(",
"'Error getting bucket-type properties.'",
")"
] | Get properties for a bucket-type | [
"Get",
"properties",
"for",
"a",
"bucket",
"-",
"type"
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L313-L325 | train | 251,697 |
basho/riak-python-client | riak/transports/http/transport.py | HttpTransport.set_bucket_type_props | def set_bucket_type_props(self, bucket_type, props):
"""
Set the properties on the bucket-type
"""
self._check_bucket_types(bucket_type)
url = self.bucket_type_properties_path(bucket_type.name)
headers = {'Content-Type': 'application/json'}
content = json.dumps({'props': props})
# Run the request...
status, _, _ = self._request('PUT', url, headers, content)
if status != 204:
raise RiakError('Error setting bucket-type properties.')
return True | python | def set_bucket_type_props(self, bucket_type, props):
"""
Set the properties on the bucket-type
"""
self._check_bucket_types(bucket_type)
url = self.bucket_type_properties_path(bucket_type.name)
headers = {'Content-Type': 'application/json'}
content = json.dumps({'props': props})
# Run the request...
status, _, _ = self._request('PUT', url, headers, content)
if status != 204:
raise RiakError('Error setting bucket-type properties.')
return True | [
"def",
"set_bucket_type_props",
"(",
"self",
",",
"bucket_type",
",",
"props",
")",
":",
"self",
".",
"_check_bucket_types",
"(",
"bucket_type",
")",
"url",
"=",
"self",
".",
"bucket_type_properties_path",
"(",
"bucket_type",
".",
"name",
")",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
"content",
"=",
"json",
".",
"dumps",
"(",
"{",
"'props'",
":",
"props",
"}",
")",
"# Run the request...",
"status",
",",
"_",
",",
"_",
"=",
"self",
".",
"_request",
"(",
"'PUT'",
",",
"url",
",",
"headers",
",",
"content",
")",
"if",
"status",
"!=",
"204",
":",
"raise",
"RiakError",
"(",
"'Error setting bucket-type properties.'",
")",
"return",
"True"
] | Set the properties on the bucket-type | [
"Set",
"the",
"properties",
"on",
"the",
"bucket",
"-",
"type"
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L327-L341 | train | 251,698 |
basho/riak-python-client | riak/transports/http/transport.py | HttpTransport.mapred | def mapred(self, inputs, query, timeout=None):
"""
Run a MapReduce query.
"""
# Construct the job, optionally set the timeout...
content = self._construct_mapred_json(inputs, query, timeout)
# Do the request...
url = self.mapred_path()
headers = {'Content-Type': 'application/json'}
status, headers, body = self._request('POST', url, headers, content)
# Make sure the expected status code came back...
if status != 200:
raise RiakError(
'Error running MapReduce operation. Headers: %s Body: %s' %
(repr(headers), repr(body)))
result = json.loads(bytes_to_str(body))
return result | python | def mapred(self, inputs, query, timeout=None):
"""
Run a MapReduce query.
"""
# Construct the job, optionally set the timeout...
content = self._construct_mapred_json(inputs, query, timeout)
# Do the request...
url = self.mapred_path()
headers = {'Content-Type': 'application/json'}
status, headers, body = self._request('POST', url, headers, content)
# Make sure the expected status code came back...
if status != 200:
raise RiakError(
'Error running MapReduce operation. Headers: %s Body: %s' %
(repr(headers), repr(body)))
result = json.loads(bytes_to_str(body))
return result | [
"def",
"mapred",
"(",
"self",
",",
"inputs",
",",
"query",
",",
"timeout",
"=",
"None",
")",
":",
"# Construct the job, optionally set the timeout...",
"content",
"=",
"self",
".",
"_construct_mapred_json",
"(",
"inputs",
",",
"query",
",",
"timeout",
")",
"# Do the request...",
"url",
"=",
"self",
".",
"mapred_path",
"(",
")",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
"status",
",",
"headers",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'POST'",
",",
"url",
",",
"headers",
",",
"content",
")",
"# Make sure the expected status code came back...",
"if",
"status",
"!=",
"200",
":",
"raise",
"RiakError",
"(",
"'Error running MapReduce operation. Headers: %s Body: %s'",
"%",
"(",
"repr",
"(",
"headers",
")",
",",
"repr",
"(",
"body",
")",
")",
")",
"result",
"=",
"json",
".",
"loads",
"(",
"bytes_to_str",
"(",
"body",
")",
")",
"return",
"result"
] | Run a MapReduce query. | [
"Run",
"a",
"MapReduce",
"query",
"."
] | 91de13a16607cdf553d1a194e762734e3bec4231 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L343-L362 | train | 251,699 |