# -*- coding: utf8 -*-
# Copyright 1999-2017 Tencent Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tencentcloud.common.abstract_model import AbstractModel
class AccountAttribute(AbstractModel):
"""账户属性对象
"""
def __init__(self):
"""
:param AttributeName: 属性名
:type AttributeName: str
:param AttributeValues: 属性值
:type AttributeValues: list of str
"""
self.AttributeName = None
self.AttributeValues = None
def _deserialize(self, params):
self.AttributeName = params.get("AttributeName")
self.AttributeValues = params.get("AttributeValues")
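# A minimal usage sketch (not part of the original module): every model in this
# file defaults its attributes to None and lets _deserialize() copy matching
# keys out of a plain dict, so a model can be populated from a decoded JSON
# response like this ("max-instances" is a hypothetical attribute name):
#
#   attr = AccountAttribute()
#   attr._deserialize({"AttributeName": "max-instances", "AttributeValues": ["100"]})
#   assert attr.AttributeName == "max-instances"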
class Address(AbstractModel):
"""描述 EIP 信息
"""
def __init__(self):
"""
:param AddressId: `EIP`的`ID`,是`EIP`的唯一标识。
:type AddressId: str
:param AddressName: `EIP`名称。
:type AddressName: str
:param AddressStatus: `EIP`状态。
:type AddressStatus: str
:param AddressIp: 弹性外网IP
:type AddressIp: str
:param BindedResourceId: 绑定的资源实例`ID`。可能是一个`CVM`,`NAT`,或是弹性网卡。
:type BindedResourceId: str
:param CreatedTime: 创建时间。按照`ISO8601`标准表示,并且使用`UTC`时间。格式为:`YYYY-MM-DDThh:mm:ssZ`。
:type CreatedTime: str
"""
self.AddressId = None
self.AddressName = None
self.AddressStatus = None
self.AddressIp = None
self.BindedResourceId = None
self.CreatedTime = None
def _deserialize(self, params):
self.AddressId = params.get("AddressId")
self.AddressName = params.get("AddressName")
self.AddressStatus = params.get("AddressStatus")
self.AddressIp = params.get("AddressIp")
self.BindedResourceId = params.get("BindedResourceId")
self.CreatedTime = params.get("CreatedTime")
class AddressTemplate(AbstractModel):
"""IP地址模板
"""
def __init__(self):
"""
:param AddressTemplateName: IP地址模板名称。
:type AddressTemplateName: str
:param AddressTemplateId: IP地址模板实例唯一ID。
:type AddressTemplateId: str
:param AddressSet: IP地址信息。
:type AddressSet: list of str
:param CreatedTime: 创建时间。
:type CreatedTime: str
"""
self.AddressTemplateName = None
self.AddressTemplateId = None
self.AddressSet = None
self.CreatedTime = None
def _deserialize(self, params):
self.AddressTemplateName = params.get("AddressTemplateName")
self.AddressTemplateId = params.get("AddressTemplateId")
self.AddressSet = params.get("AddressSet")
self.CreatedTime = params.get("CreatedTime")
class AddressTemplateGroup(AbstractModel):
"""IP地址模板集合
"""
def __init__(self):
"""
:param AddressTemplateGroupName: IP地址模板集合名称。
:type AddressTemplateGroupName: str
:param AddressTemplateGroupId: IP地址模板集合实例ID,例如:ipmg-dih8xdbq。
:type AddressTemplateGroupId: str
:param AddressTemplateIdSet: IP地址模板ID。
:type AddressTemplateIdSet: list of str
:param CreatedTime: 创建时间。
:type CreatedTime: str
"""
self.AddressTemplateGroupName = None
self.AddressTemplateGroupId = None
self.AddressTemplateIdSet = None
self.CreatedTime = None
def _deserialize(self, params):
self.AddressTemplateGroupName = params.get("AddressTemplateGroupName")
self.AddressTemplateGroupId = params.get("AddressTemplateGroupId")
self.AddressTemplateIdSet = params.get("AddressTemplateIdSet")
self.CreatedTime = params.get("CreatedTime")
class AllocateAddressesRequest(AbstractModel):
"""AllocateAddresses请求参数结构体
"""
def __init__(self):
"""
:param AddressCount: 申请 EIP 数量,默认值为1。
:type AddressCount: int
"""
self.AddressCount = None
def _deserialize(self, params):
self.AddressCount = params.get("AddressCount")
class AllocateAddressesResponse(AbstractModel):
"""AllocateAddresses返回参数结构体
"""
def __init__(self):
"""
:param AddressSet: 申请到的 EIP 的唯一 ID 列表。
:type AddressSet: list of str
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.AddressSet = None
self.RequestId = None
def _deserialize(self, params):
self.AddressSet = params.get("AddressSet")
self.RequestId = params.get("RequestId")
class AssignPrivateIpAddressesRequest(AbstractModel):
"""AssignPrivateIpAddresses请求参数结构体
"""
def __init__(self):
"""
:param NetworkInterfaceId: 弹性网卡实例ID,例如:eni-m6dyj72l。
:type NetworkInterfaceId: str
:param PrivateIpAddresses: 指定的内网IP信息。
:type PrivateIpAddresses: list of PrivateIpAddressSpecification
:param SecondaryPrivateIpAddressCount: 新申请的内网IP地址个数。
:type SecondaryPrivateIpAddressCount: int
"""
self.NetworkInterfaceId = None
self.PrivateIpAddresses = None
self.SecondaryPrivateIpAddressCount = None
def _deserialize(self, params):
self.NetworkInterfaceId = params.get("NetworkInterfaceId")
if params.get("PrivateIpAddresses") is not None:
self.PrivateIpAddresses = []
for item in params.get("PrivateIpAddresses"):
obj = PrivateIpAddressSpecification()
obj._deserialize(item)
self.PrivateIpAddresses.append(obj)
self.SecondaryPrivateIpAddressCount = params.get("SecondaryPrivateIpAddressCount")
class AssignPrivateIpAddressesResponse(AbstractModel):
"""AssignPrivateIpAddresses返回参数结构体
"""
def __init__(self):
"""
:param PrivateIpAddressSet: 内网IP详细信息。
:type PrivateIpAddressSet: list of PrivateIpAddressSpecification
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.PrivateIpAddressSet = None
self.RequestId = None
def _deserialize(self, params):
if params.get("PrivateIpAddressSet") is not None:
self.PrivateIpAddressSet = []
for item in params.get("PrivateIpAddressSet"):
obj = PrivateIpAddressSpecification()
obj._deserialize(item)
self.PrivateIpAddressSet.append(obj)
self.RequestId = params.get("RequestId")
class AssociateAddressRequest(AbstractModel):
"""AssociateAddress请求参数结构体
"""
def __init__(self):
"""
:param AddressId: 标识 EIP 的唯一 ID。EIP 唯一 ID 形如:`eip-11112222`。
:type AddressId: str
:param InstanceId: 要绑定的实例 ID。实例 ID 形如:`ins-11112222`。可通过登录[控制台](https://console.cloud.tencent.com/cvm)查询,也可通过 [DescribeInstances](https://cloud.tencent.com/document/api/213/9389) 接口返回值中的`InstanceId`获取。
:type InstanceId: str
:param NetworkInterfaceId: 要绑定的弹性网卡 ID。 弹性网卡 ID 形如:`eni-11112222`。`NetworkInterfaceId` 与 `InstanceId` 不可同时指定。弹性网卡 ID 可通过登录[控制台](https://console.cloud.tencent.com/vpc/eni)查询,也可通过[DescribeNetworkInterfaces](https://cloud.tencent.com/document/api/215/4814)接口返回值中的`networkInterfaceId`获取。
:type NetworkInterfaceId: str
:param PrivateIpAddress: 要绑定的内网 IP。如果指定了 `NetworkInterfaceId` 则也必须指定 `PrivateIpAddress` ,表示将 EIP 绑定到指定弹性网卡的指定内网 IP 上。同时要确保指定的 `PrivateIpAddress` 是指定的 `NetworkInterfaceId` 上的一个内网 IP。指定弹性网卡的内网 IP 可通过登录[控制台](https://console.cloud.tencent.com/vpc/eni)查询,也可通过[DescribeNetworkInterfaces](https://cloud.tencent.com/document/api/215/4814)接口返回值中的`privateIpAddress`获取。
:type PrivateIpAddress: str
"""
self.AddressId = None
self.InstanceId = None
self.NetworkInterfaceId = None
self.PrivateIpAddress = None
def _deserialize(self, params):
self.AddressId = params.get("AddressId")
self.InstanceId = params.get("InstanceId")
self.NetworkInterfaceId = params.get("NetworkInterfaceId")
self.PrivateIpAddress = params.get("PrivateIpAddress")
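# A minimal usage sketch (not part of the original module): as the docstring
# above notes, InstanceId and NetworkInterfaceId are mutually exclusive, and a
# NetworkInterfaceId must be accompanied by one of its own private IPs. Binding
# an EIP to a specific private IP of an ENI would look like this (all IDs and
# addresses are placeholders):
#
#   req = AssociateAddressRequest()
#   req.AddressId = "eip-11112222"
#   req.NetworkInterfaceId = "eni-11112222"
#   req.PrivateIpAddress = "10.0.0.5"   # must be a private IP on eni-11112222
#   # req.InstanceId stays None whenever NetworkInterfaceId is set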
class AssociateAddressResponse(AbstractModel):
"""AssociateAddress返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class AttachClassicLinkVpcRequest(AbstractModel):
"""AttachClassicLinkVpc请求参数结构体
"""
def __init__(self):
"""
:param VpcId: VPC实例ID
:type VpcId: str
:param InstanceIds: CVM实例ID
:type InstanceIds: list of str
"""
self.VpcId = None
self.InstanceIds = None
def _deserialize(self, params):
self.VpcId = params.get("VpcId")
self.InstanceIds = params.get("InstanceIds")
class AttachClassicLinkVpcResponse(AbstractModel):
"""AttachClassicLinkVpc返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class AttachNetworkInterfaceRequest(AbstractModel):
"""AttachNetworkInterface请求参数结构体
"""
def __init__(self):
"""
:param NetworkInterfaceId: 弹性网卡实例ID,例如:eni-m6dyj72l。
:type NetworkInterfaceId: str
:param InstanceId: CVM实例ID。形如:ins-r8hr2upy。
:type InstanceId: str
"""
self.NetworkInterfaceId = None
self.InstanceId = None
def _deserialize(self, params):
self.NetworkInterfaceId = params.get("NetworkInterfaceId")
self.InstanceId = params.get("InstanceId")
class AttachNetworkInterfaceResponse(AbstractModel):
"""AttachNetworkInterface返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ClassicLinkInstance(AbstractModel):
"""私有网络和基础网络互通设备
"""
def __init__(self):
"""
:param VpcId: VPC实例ID
:type VpcId: str
:param InstanceId: 云服务器实例唯一ID
:type InstanceId: str
"""
self.VpcId = None
self.InstanceId = None
def _deserialize(self, params):
self.VpcId = params.get("VpcId")
self.InstanceId = params.get("InstanceId")
class CreateAddressTemplateGroupRequest(AbstractModel):
"""CreateAddressTemplateGroup请求参数结构体
"""
def __init__(self):
"""
:param AddressTemplateGroupName: IP地址模版集合名称。
:type AddressTemplateGroupName: str
:param AddressTemplateIds: IP地址模版实例ID,例如:ipm-mdunqeb6。
:type AddressTemplateIds: list of str
"""
self.AddressTemplateGroupName = None
self.AddressTemplateIds = None
def _deserialize(self, params):
self.AddressTemplateGroupName = params.get("AddressTemplateGroupName")
self.AddressTemplateIds = params.get("AddressTemplateIds")
class CreateAddressTemplateGroupResponse(AbstractModel):
"""CreateAddressTemplateGroup返回参数结构体
"""
def __init__(self):
"""
:param AddressTemplateGroup: IP地址模板集合对象。
:type AddressTemplateGroup: :class:`tencentcloud.vpc.v20170312.models.AddressTemplateGroup`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.AddressTemplateGroup = None
self.RequestId = None
def _deserialize(self, params):
if params.get("AddressTemplateGroup") is not None:
self.AddressTemplateGroup = AddressTemplateGroup()
self.AddressTemplateGroup._deserialize(params.get("AddressTemplateGroup"))
self.RequestId = params.get("RequestId")
class CreateAddressTemplateRequest(AbstractModel):
"""CreateAddressTemplate请求参数结构体
"""
def __init__(self):
"""
:param AddressTemplateName: IP地址模版名称
:type AddressTemplateName: str
:param Addresses: 地址信息,支持 IP、CIDR、IP 范围。
:type Addresses: list of str
"""
self.AddressTemplateName = None
self.Addresses = None
def _deserialize(self, params):
self.AddressTemplateName = params.get("AddressTemplateName")
self.Addresses = params.get("Addresses")
class CreateAddressTemplateResponse(AbstractModel):
"""CreateAddressTemplate返回参数结构体
"""
def __init__(self):
"""
:param AddressTemplate: IP地址模板对象。
:type AddressTemplate: :class:`tencentcloud.vpc.v20170312.models.AddressTemplate`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.AddressTemplate = None
self.RequestId = None
def _deserialize(self, params):
if params.get("AddressTemplate") is not None:
self.AddressTemplate = AddressTemplate()
self.AddressTemplate._deserialize(params.get("AddressTemplate"))
self.RequestId = params.get("RequestId")
class CreateCustomerGatewayRequest(AbstractModel):
"""CreateCustomerGateway请求参数结构体
"""
def __init__(self):
"""
:param CustomerGatewayName: 对端网关名称,可任意命名,但不得超过60个字符。
:type CustomerGatewayName: str
:param IpAddress: 对端网关公网IP。
:type IpAddress: str
"""
self.CustomerGatewayName = None
self.IpAddress = None
def _deserialize(self, params):
self.CustomerGatewayName = params.get("CustomerGatewayName")
self.IpAddress = params.get("IpAddress")
class CreateCustomerGatewayResponse(AbstractModel):
"""CreateCustomerGateway返回参数结构体
"""
def __init__(self):
"""
:param CustomerGateway: 对端网关对象
:type CustomerGateway: :class:`tencentcloud.vpc.v20170312.models.CustomerGateway`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.CustomerGateway = None
self.RequestId = None
def _deserialize(self, params):
if params.get("CustomerGateway") is not None:
self.CustomerGateway = CustomerGateway()
self.CustomerGateway._deserialize(params.get("CustomerGateway"))
self.RequestId = params.get("RequestId")
class CreateDefaultVpcRequest(AbstractModel):
"""CreateDefaultVpc请求参数结构体
"""
def __init__(self):
"""
:param Zone: 子网所在的可用区ID,不指定将随机选择可用区
:type Zone: str
:param Force: 是否强制返回默认VPC
:type Force: bool
"""
self.Zone = None
self.Force = None
def _deserialize(self, params):
self.Zone = params.get("Zone")
self.Force = params.get("Force")
class CreateDefaultVpcResponse(AbstractModel):
"""CreateDefaultVpc返回参数结构体
"""
def __init__(self):
"""
:param Vpc: 默认VPC和子网ID
:type Vpc: :class:`tencentcloud.vpc.v20170312.models.DefaultVpcSubnet`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.Vpc = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Vpc") is not None:
self.Vpc = DefaultVpcSubnet()
self.Vpc._deserialize(params.get("Vpc"))
self.RequestId = params.get("RequestId")
class CreateNetworkInterfaceRequest(AbstractModel):
"""CreateNetworkInterface请求参数结构体
"""
def __init__(self):
"""
:param VpcId: VPC实例ID。可通过DescribeVpcs接口返回值中的VpcId获取。
:type VpcId: str
:param NetworkInterfaceName: 弹性网卡名称,最大长度不能超过60个字节。
:type NetworkInterfaceName: str
:param SubnetId: 弹性网卡所在的子网实例ID,例如:subnet-0ap8nwca。
:type SubnetId: str
:param NetworkInterfaceDescription: 弹性网卡描述,可任意命名,但不得超过60个字符。
:type NetworkInterfaceDescription: str
:param SecondaryPrivateIpAddressCount: 新申请的内网IP地址个数。
:type SecondaryPrivateIpAddressCount: int
:param SecurityGroupIds: 指定绑定的安全组,例如:['sg-1dd51d']。
:type SecurityGroupIds: list of str
:param PrivateIpAddresses: 指定内网IP信息。
:type PrivateIpAddresses: list of PrivateIpAddressSpecification
"""
self.VpcId = None
self.NetworkInterfaceName = None
self.SubnetId = None
self.NetworkInterfaceDescription = None
self.SecondaryPrivateIpAddressCount = None
self.SecurityGroupIds = None
self.PrivateIpAddresses = None
def _deserialize(self, params):
self.VpcId = params.get("VpcId")
self.NetworkInterfaceName = params.get("NetworkInterfaceName")
self.SubnetId = params.get("SubnetId")
self.NetworkInterfaceDescription = params.get("NetworkInterfaceDescription")
self.SecondaryPrivateIpAddressCount = params.get("SecondaryPrivateIpAddressCount")
self.SecurityGroupIds = params.get("SecurityGroupIds")
if params.get("PrivateIpAddresses") is not None:
self.PrivateIpAddresses = []
for item in params.get("PrivateIpAddresses"):
obj = PrivateIpAddressSpecification()
obj._deserialize(item)
self.PrivateIpAddresses.append(obj)
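# A minimal usage sketch (not part of the original module): PrivateIpAddresses
# takes a list of PrivateIpAddressSpecification objects (defined further down
# in this file; the PrivateIpAddress field shown here is assumed from the wider
# SDK). Alternatively, omit it and let the service pick addresses via
# SecondaryPrivateIpAddressCount. All IDs below are placeholders:
#
#   req = CreateNetworkInterfaceRequest()
#   req.VpcId = "vpc-f49l6u0z"
#   req.SubnetId = "subnet-0ap8nwca"
#   req.NetworkInterfaceName = "demo-eni"
#   spec = PrivateIpAddressSpecification()
#   spec.PrivateIpAddress = "10.0.0.10"
#   req.PrivateIpAddresses = [spec]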
class CreateNetworkInterfaceResponse(AbstractModel):
"""CreateNetworkInterface返回参数结构体
"""
def __init__(self):
"""
:param NetworkInterface: 弹性网卡实例。
:type NetworkInterface: :class:`tencentcloud.vpc.v20170312.models.NetworkInterface`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.NetworkInterface = None
self.RequestId = None
def _deserialize(self, params):
if params.get("NetworkInterface") is not None:
self.NetworkInterface = NetworkInterface()
self.NetworkInterface._deserialize(params.get("NetworkInterface"))
self.RequestId = params.get("RequestId")
class CreateRouteTableRequest(AbstractModel):
"""CreateRouteTable请求参数结构体
"""
def __init__(self):
"""
:param VpcId: 待操作的VPC实例ID。可通过DescribeVpcs接口返回值中的VpcId获取。
:type VpcId: str
:param RouteTableName: 路由表名称,最大长度不能超过60个字节。
:type RouteTableName: str
"""
self.VpcId = None
self.RouteTableName = None
def _deserialize(self, params):
self.VpcId = params.get("VpcId")
self.RouteTableName = params.get("RouteTableName")
class CreateRouteTableResponse(AbstractModel):
"""CreateRouteTable返回参数结构体
"""
def __init__(self):
"""
:param RouteTable: 路由表对象。
:type RouteTable: :class:`tencentcloud.vpc.v20170312.models.RouteTable`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RouteTable = None
self.RequestId = None
def _deserialize(self, params):
if params.get("RouteTable") is not None:
self.RouteTable = RouteTable()
self.RouteTable._deserialize(params.get("RouteTable"))
self.RequestId = params.get("RequestId")
class CreateRoutesRequest(AbstractModel):
"""CreateRoutes请求参数结构体
"""
def __init__(self):
"""
:param RouteTableId: 路由表实例ID。
:type RouteTableId: str
:param Routes: 路由策略对象。
:type Routes: list of Route
"""
self.RouteTableId = None
self.Routes = None
def _deserialize(self, params):
self.RouteTableId = params.get("RouteTableId")
if params.get("Routes") is not None:
self.Routes = []
for item in params.get("Routes"):
obj = Route()
obj._deserialize(item)
self.Routes.append(obj)
class CreateRoutesResponse(AbstractModel):
"""CreateRoutes返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class CreateSecurityGroupPoliciesRequest(AbstractModel):
"""CreateSecurityGroupPolicies请求参数结构体
"""
def __init__(self):
"""
:param SecurityGroupId: 安全组实例ID,例如sg-33ocnj9n,可通过DescribeSecurityGroups获取。
:type SecurityGroupId: str
:param SecurityGroupPolicySet: 安全组规则集合。
:type SecurityGroupPolicySet: :class:`tencentcloud.vpc.v20170312.models.SecurityGroupPolicySet`
"""
self.SecurityGroupId = None
self.SecurityGroupPolicySet = None
def _deserialize(self, params):
self.SecurityGroupId = params.get("SecurityGroupId")
if params.get("SecurityGroupPolicySet") is not None:
self.SecurityGroupPolicySet = SecurityGroupPolicySet()
self.SecurityGroupPolicySet._deserialize(params.get("SecurityGroupPolicySet"))
class CreateSecurityGroupPoliciesResponse(AbstractModel):
"""CreateSecurityGroupPolicies返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class CreateSecurityGroupRequest(AbstractModel):
"""CreateSecurityGroup请求参数结构体
"""
def __init__(self):
"""
:param GroupName: 安全组名称,可任意命名,但不得超过60个字符。
:type GroupName: str
:param GroupDescription: 安全组备注,最多100个字符。
:type GroupDescription: str
:param ProjectId: 项目id,默认0。可在qcloud控制台项目管理页面查询到。
:type ProjectId: str
"""
self.GroupName = None
self.GroupDescription = None
self.ProjectId = None
def _deserialize(self, params):
self.GroupName = params.get("GroupName")
self.GroupDescription = params.get("GroupDescription")
self.ProjectId = params.get("ProjectId")
class CreateSecurityGroupResponse(AbstractModel):
"""CreateSecurityGroup返回参数结构体
"""
def __init__(self):
"""
:param SecurityGroup: 安全组对象。
:type SecurityGroup: :class:`tencentcloud.vpc.v20170312.models.SecurityGroup`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.SecurityGroup = None
self.RequestId = None
def _deserialize(self, params):
if params.get("SecurityGroup") is not None:
self.SecurityGroup = SecurityGroup()
self.SecurityGroup._deserialize(params.get("SecurityGroup"))
self.RequestId = params.get("RequestId")
class CreateServiceTemplateGroupRequest(AbstractModel):
"""CreateServiceTemplateGroup请求参数结构体
"""
def __init__(self):
"""
:param ServiceTemplateGroupName: 协议端口模板集合名称
:type ServiceTemplateGroupName: str
:param ServiceTemplateIds: 协议端口模板实例ID,例如:ppm-4dw6agho。
:type ServiceTemplateIds: list of str
"""
self.ServiceTemplateGroupName = None
self.ServiceTemplateIds = None
def _deserialize(self, params):
self.ServiceTemplateGroupName = params.get("ServiceTemplateGroupName")
self.ServiceTemplateIds = params.get("ServiceTemplateIds")
class CreateServiceTemplateGroupResponse(AbstractModel):
"""CreateServiceTemplateGroup返回参数结构体
"""
def __init__(self):
"""
:param ServiceTemplateGroup: 协议端口模板集合对象。
:type ServiceTemplateGroup: :class:`tencentcloud.vpc.v20170312.models.ServiceTemplateGroup`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.ServiceTemplateGroup = None
self.RequestId = None
def _deserialize(self, params):
if params.get("ServiceTemplateGroup") is not None:
self.ServiceTemplateGroup = ServiceTemplateGroup()
self.ServiceTemplateGroup._deserialize(params.get("ServiceTemplateGroup"))
self.RequestId = params.get("RequestId")
class CreateServiceTemplateRequest(AbstractModel):
"""CreateServiceTemplate请求参数结构体
"""
def __init__(self):
"""
:param ServiceTemplateName: 协议端口模板名称
:type ServiceTemplateName: str
:param Services: 支持单个端口、多个端口、连续端口及所有端口,协议支持:TCP、UDP、ICMP、GRE 协议。
:type Services: list of str
"""
self.ServiceTemplateName = None
self.Services = None
def _deserialize(self, params):
self.ServiceTemplateName = params.get("ServiceTemplateName")
self.Services = params.get("Services")
class CreateServiceTemplateResponse(AbstractModel):
"""CreateServiceTemplate返回参数结构体
"""
def __init__(self):
"""
:param ServiceTemplate: 协议端口模板对象。
:type ServiceTemplate: :class:`tencentcloud.vpc.v20170312.models.ServiceTemplate`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.ServiceTemplate = None
self.RequestId = None
def _deserialize(self, params):
if params.get("ServiceTemplate") is not None:
self.ServiceTemplate = ServiceTemplate()
self.ServiceTemplate._deserialize(params.get("ServiceTemplate"))
self.RequestId = params.get("RequestId")
class CreateSubnetRequest(AbstractModel):
"""CreateSubnet请求参数结构体
"""
def __init__(self):
"""
:param VpcId: 待操作的VPC实例ID。可通过DescribeVpcs接口返回值中的VpcId获取。
:type VpcId: str
:param SubnetName: 子网名称,最大长度不能超过60个字节。
:type SubnetName: str
:param CidrBlock: 子网网段,子网网段必须在VPC网段内,相同VPC内子网网段不能重叠。
:type CidrBlock: str
:param Zone: 子网所在的可用区ID,不同子网选择不同可用区可以做跨可用区灾备。
:type Zone: str
"""
self.VpcId = None
self.SubnetName = None
self.CidrBlock = None
self.Zone = None
def _deserialize(self, params):
self.VpcId = params.get("VpcId")
self.SubnetName = params.get("SubnetName")
self.CidrBlock = params.get("CidrBlock")
self.Zone = params.get("Zone")
class CreateSubnetResponse(AbstractModel):
"""CreateSubnet返回参数结构体
"""
def __init__(self):
"""
:param Subnet: 子网对象。
:type Subnet: :class:`tencentcloud.vpc.v20170312.models.Subnet`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.Subnet = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Subnet") is not None:
self.Subnet = Subnet()
self.Subnet._deserialize(params.get("Subnet"))
self.RequestId = params.get("RequestId")
class CreateVpcRequest(AbstractModel):
"""CreateVpc请求参数结构体
"""
def __init__(self):
"""
:param VpcName: vpc名称,最大长度不能超过60个字节。
:type VpcName: str
:param CidrBlock: vpc的cidr,只能为10.0.0.0/16,172.16.0.0/12,192.168.0.0/16这三个内网网段内。
:type CidrBlock: str
:param EnableMulticast: 是否开启组播。true: 开启, false: 不开启。
:type EnableMulticast: str
:param DnsServers: DNS地址,最多支持4个
:type DnsServers: list of str
:param DomainName: 域名
:type DomainName: str
"""
self.VpcName = None
self.CidrBlock = None
self.EnableMulticast = None
self.DnsServers = None
self.DomainName = None
def _deserialize(self, params):
self.VpcName = params.get("VpcName")
self.CidrBlock = params.get("CidrBlock")
self.EnableMulticast = params.get("EnableMulticast")
self.DnsServers = params.get("DnsServers")
self.DomainName = params.get("DomainName")
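# A minimal usage sketch (not part of the original module): CidrBlock must fall
# inside one of the three private ranges listed in the docstring above, and
# EnableMulticast is the string "true"/"false" rather than a bool. All values
# below are placeholders:
#
#   req = CreateVpcRequest()
#   req.VpcName = "demo-vpc"
#   req.CidrBlock = "10.0.0.0/16"
#   req.EnableMulticast = "false"
#   req.DnsServers = ["10.0.0.2"]   # up to 4 entries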
class CreateVpcResponse(AbstractModel):
"""CreateVpc返回参数结构体
"""
def __init__(self):
"""
:param Vpc: Vpc对象。
:type Vpc: :class:`tencentcloud.vpc.v20170312.models.Vpc`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.Vpc = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Vpc") is not None:
self.Vpc = Vpc()
self.Vpc._deserialize(params.get("Vpc"))
self.RequestId = params.get("RequestId")
class CreateVpnConnectionRequest(AbstractModel):
"""CreateVpnConnection请求参数结构体
"""
def __init__(self):
"""
:param VpcId: VPC实例ID。可通过DescribeVpcs接口返回值中的VpcId获取。
:type VpcId: str
:param VpnGatewayId: VPN网关实例ID。
:type VpnGatewayId: str
:param CustomerGatewayId: 对端网关ID,例如:cgw-2wqq41m9,可通过DescribeCustomerGateways接口查询对端网关。
:type CustomerGatewayId: str
:param VpnConnectionName: 通道名称,可任意命名,但不得超过60个字符。
:type VpnConnectionName: str
:param PreShareKey: 预共享密钥。
:type PreShareKey: str
:param SecurityPolicyDatabases: SPD策略组,例如:{"10.0.0.5/24":["172.16.31.10/16"]},10.0.0.5/24是vpc内网段172.16.31.10/16是IDC网段。用户指定VPC内哪些网段可以和您IDC中哪些网段通信。
:type SecurityPolicyDatabases: list of SecurityPolicyDatabase
:param IKEOptionsSpecification: IKE配置(Internet Key Exchange,因特网密钥交换),IKE具有一套自保护机制,用户配置网络安全协议
:type IKEOptionsSpecification: :class:`tencentcloud.vpc.v20170312.models.IKEOptionsSpecification`
:param IPSECOptionsSpecification: IPSec配置,腾讯云提供IPSec安全会话设置
:type IPSECOptionsSpecification: :class:`tencentcloud.vpc.v20170312.models.IPSECOptionsSpecification`
"""
self.VpcId = None
self.VpnGatewayId = None
self.CustomerGatewayId = None
self.VpnConnectionName = None
self.PreShareKey = None
self.SecurityPolicyDatabases = None
self.IKEOptionsSpecification = None
self.IPSECOptionsSpecification = None
def _deserialize(self, params):
self.VpcId = params.get("VpcId")
self.VpnGatewayId = params.get("VpnGatewayId")
self.CustomerGatewayId = params.get("CustomerGatewayId")
self.VpnConnectionName = params.get("VpnConnectionName")
self.PreShareKey = params.get("PreShareKey")
if params.get("SecurityPolicyDatabases") is not None:
self.SecurityPolicyDatabases = []
for item in params.get("SecurityPolicyDatabases"):
obj = SecurityPolicyDatabase()
obj._deserialize(item)
self.SecurityPolicyDatabases.append(obj)
if params.get("IKEOptionsSpecification") is not None:
self.IKEOptionsSpecification = IKEOptionsSpecification()
self.IKEOptionsSpecification._deserialize(params.get("IKEOptionsSpecification"))
if params.get("IPSECOptionsSpecification") is not None:
self.IPSECOptionsSpecification = IPSECOptionsSpecification()
self.IPSECOptionsSpecification._deserialize(params.get("IPSECOptionsSpecification"))
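# A minimal usage sketch (not part of the original module): each
# SecurityPolicyDatabase entry pairs a VPC-side CIDR with the IDC-side CIDRs it
# may reach, matching the {"10.0.0.5/24": ["172.16.31.10/16"]} example in the
# docstring above. SecurityPolicyDatabase is defined elsewhere in the SDK; its
# LocalCidrBlock/RemoteCidrBlock field names are assumed here:
#
#   spd = SecurityPolicyDatabase()
#   spd.LocalCidrBlock = "10.0.0.5/24"          # VPC-side CIDR
#   spd.RemoteCidrBlock = ["172.16.31.10/16"]   # IDC-side CIDRs
#   req = CreateVpnConnectionRequest()
#   req.SecurityPolicyDatabases = [spd]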
class CreateVpnConnectionResponse(AbstractModel):
"""CreateVpnConnection返回参数结构体
"""
def __init__(self):
"""
:param VpnConnection: 通道实例对象。
:type VpnConnection: :class:`tencentcloud.vpc.v20170312.models.VpnConnection`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.VpnConnection = None
self.RequestId = None
def _deserialize(self, params):
if params.get("VpnConnection") is not None:
self.VpnConnection = VpnConnection()
self.VpnConnection._deserialize(params.get("VpnConnection"))
self.RequestId = params.get("RequestId")
class CreateVpnGatewayRequest(AbstractModel):
"""CreateVpnGateway请求参数结构体
"""
def __init__(self):
"""
:param VpcId: VPC实例ID。可通过DescribeVpcs接口返回值中的VpcId获取。
:type VpcId: str
:param VpnGatewayName: VPN网关名称,最大长度不能超过60个字节。
:type VpnGatewayName: str
:param InternetMaxBandwidthOut: 公网带宽设置。可选带宽规格:5, 10, 20, 50, 100;单位:Mbps
:type InternetMaxBandwidthOut: int
:param InstanceChargeType: VPN网关计费模式,PREPAID:表示预付费,即包年包月,POSTPAID_BY_HOUR:表示后付费,即按量计费。默认:POSTPAID_BY_HOUR,如果指定预付费模式,参数InstanceChargePrepaid必填。
:type InstanceChargeType: str
:param InstanceChargePrepaid: 预付费模式,即包年包月相关参数设置。通过该参数可以指定包年包月实例的购买时长、是否设置自动续费等属性。若指定实例的付费模式为预付费则该参数必传。
:type InstanceChargePrepaid: :class:`tencentcloud.vpc.v20170312.models.InstanceChargePrepaid`
"""
self.VpcId = None
self.VpnGatewayName = None
self.InternetMaxBandwidthOut = None
self.InstanceChargeType = None
self.InstanceChargePrepaid = None
def _deserialize(self, params):
self.VpcId = params.get("VpcId")
self.VpnGatewayName = params.get("VpnGatewayName")
self.InternetMaxBandwidthOut = params.get("InternetMaxBandwidthOut")
self.InstanceChargeType = params.get("InstanceChargeType")
if params.get("InstanceChargePrepaid") is not None:
self.InstanceChargePrepaid = InstanceChargePrepaid()
self.InstanceChargePrepaid._deserialize(params.get("InstanceChargePrepaid"))
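# A minimal usage sketch (not part of the original module): when
# InstanceChargeType is "PREPAID", InstanceChargePrepaid becomes mandatory.
# InstanceChargePrepaid is defined elsewhere in the SDK; its Period field is
# assumed here:
#
#   req = CreateVpnGatewayRequest()
#   req.VpcId = "vpc-f49l6u0z"
#   req.VpnGatewayName = "demo-vpngw"
#   req.InternetMaxBandwidthOut = 10    # Mbps, one of 5/10/20/50/100
#   req.InstanceChargeType = "PREPAID"
#   prepaid = InstanceChargePrepaid()
#   prepaid.Period = 1                  # assumed field: purchase duration
#   req.InstanceChargePrepaid = prepaid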
class CreateVpnGatewayResponse(AbstractModel):
"""CreateVpnGateway返回参数结构体
"""
def __init__(self):
"""
:param VpnGateway: VPN网关对象
:type VpnGateway: :class:`tencentcloud.vpc.v20170312.models.VpnGateway`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.VpnGateway = None
self.RequestId = None
def _deserialize(self, params):
if params.get("VpnGateway") is not None:
self.VpnGateway = VpnGateway()
self.VpnGateway._deserialize(params.get("VpnGateway"))
self.RequestId = params.get("RequestId")
class CustomerGateway(AbstractModel):
"""对端网关
"""
def __init__(self):
"""
:param CustomerGatewayId: 用户网关唯一ID
:type CustomerGatewayId: str
:param CustomerGatewayName: 网关名称
:type CustomerGatewayName: str
:param IpAddress: 公网地址
:type IpAddress: str
:param CreatedTime: 创建时间
:type CreatedTime: str
"""
self.CustomerGatewayId = None
self.CustomerGatewayName = None
self.IpAddress = None
self.CreatedTime = None
def _deserialize(self, params):
self.CustomerGatewayId = params.get("CustomerGatewayId")
self.CustomerGatewayName = params.get("CustomerGatewayName")
self.IpAddress = params.get("IpAddress")
self.CreatedTime = params.get("CreatedTime")
class CustomerGatewayVendor(AbstractModel):
"""对端网关厂商信息对象。
"""
def __init__(self):
"""
:param Platform: 平台。
:type Platform: str
:param SoftwareVersion: 软件版本。
:type SoftwareVersion: str
:param VendorName: 供应商名称。
:type VendorName: str
"""
self.Platform = None
self.SoftwareVersion = None
self.VendorName = None
def _deserialize(self, params):
self.Platform = params.get("Platform")
self.SoftwareVersion = params.get("SoftwareVersion")
self.VendorName = params.get("VendorName")
class DefaultVpcSubnet(AbstractModel):
"""默认VPC和子网
"""
def __init__(self):
"""
:param VpcId: 默认VpcId
:type VpcId: str
:param SubnetId: 默认SubnetId
:type SubnetId: str
"""
self.VpcId = None
self.SubnetId = None
def _deserialize(self, params):
self.VpcId = params.get("VpcId")
self.SubnetId = params.get("SubnetId")
class DeleteAddressTemplateGroupRequest(AbstractModel):
"""DeleteAddressTemplateGroup请求参数结构体
"""
def __init__(self):
"""
:param AddressTemplateGroupId: IP地址模板集合实例ID,例如:ipmg-90cex8mq。
:type AddressTemplateGroupId: str
"""
self.AddressTemplateGroupId = None
def _deserialize(self, params):
self.AddressTemplateGroupId = params.get("AddressTemplateGroupId")
class DeleteAddressTemplateGroupResponse(AbstractModel):
"""DeleteAddressTemplateGroup返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteAddressTemplateRequest(AbstractModel):
"""DeleteAddressTemplate请求参数结构体
"""
def __init__(self):
"""
:param AddressTemplateId: IP地址模板实例ID,例如:ipm-09o5m8kc。
:type AddressTemplateId: str
"""
self.AddressTemplateId = None
def _deserialize(self, params):
self.AddressTemplateId = params.get("AddressTemplateId")
class DeleteAddressTemplateResponse(AbstractModel):
"""DeleteAddressTemplate返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteCustomerGatewayRequest(AbstractModel):
"""DeleteCustomerGateway请求参数结构体
"""
def __init__(self):
"""
:param CustomerGatewayId: 对端网关ID,例如:cgw-2wqq41m9,可通过DescribeCustomerGateways接口查询对端网关。
:type CustomerGatewayId: str
"""
self.CustomerGatewayId = None
def _deserialize(self, params):
self.CustomerGatewayId = params.get("CustomerGatewayId")
class DeleteCustomerGatewayResponse(AbstractModel):
"""DeleteCustomerGateway返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteNetworkInterfaceRequest(AbstractModel):
"""DeleteNetworkInterface请求参数结构体
"""
def __init__(self):
"""
:param NetworkInterfaceId: 弹性网卡实例ID,例如:eni-m6dyj72l。
:type NetworkInterfaceId: str
"""
self.NetworkInterfaceId = None
def _deserialize(self, params):
self.NetworkInterfaceId = params.get("NetworkInterfaceId")
class DeleteNetworkInterfaceResponse(AbstractModel):
"""DeleteNetworkInterface返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteRouteTableRequest(AbstractModel):
"""DeleteRouteTable请求参数结构体
"""
def __init__(self):
"""
:param RouteTableId: 路由表实例ID,例如:rtb-azd4dt1c。
:type RouteTableId: str
"""
self.RouteTableId = None
def _deserialize(self, params):
self.RouteTableId = params.get("RouteTableId")
class DeleteRouteTableResponse(AbstractModel):
"""DeleteRouteTable返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteRoutesRequest(AbstractModel):
"""DeleteRoutes请求参数结构体
"""
def __init__(self):
"""
:param RouteTableId: 路由表实例ID。
:type RouteTableId: str
:param Routes: 路由策略对象。
:type Routes: list of Route
"""
self.RouteTableId = None
self.Routes = None
def _deserialize(self, params):
self.RouteTableId = params.get("RouteTableId")
if params.get("Routes") is not None:
self.Routes = []
for item in params.get("Routes"):
obj = Route()
obj._deserialize(item)
self.Routes.append(obj)
class DeleteRoutesResponse(AbstractModel):
"""DeleteRoutes返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteSecurityGroupPoliciesRequest(AbstractModel):
"""DeleteSecurityGroupPolicies请求参数结构体
"""
def __init__(self):
"""
:param SecurityGroupId: 安全组实例ID,例如sg-33ocnj9n,可通过DescribeSecurityGroups获取。
:type SecurityGroupId: str
:param SecurityGroupPolicySet: 安全组规则集合。一个请求中只能删除单个方向的一条或多条规则。支持指定索引(PolicyIndex) 匹配删除和安全组规则匹配删除两种方式,一个请求中只能使用一种匹配方式。
:type SecurityGroupPolicySet: :class:`tencentcloud.vpc.v20170312.models.SecurityGroupPolicySet`
"""
self.SecurityGroupId = None
self.SecurityGroupPolicySet = None
def _deserialize(self, params):
self.SecurityGroupId = params.get("SecurityGroupId")
if params.get("SecurityGroupPolicySet") is not None:
self.SecurityGroupPolicySet = SecurityGroupPolicySet()
self.SecurityGroupPolicySet._deserialize(params.get("SecurityGroupPolicySet"))
class DeleteSecurityGroupPoliciesResponse(AbstractModel):
"""DeleteSecurityGroupPolicies返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteSecurityGroupRequest(AbstractModel):
"""DeleteSecurityGroup请求参数结构体
"""
def __init__(self):
"""
:param SecurityGroupId: 安全组实例ID,例如sg-33ocnj9n,可通过DescribeSecurityGroups获取。
:type SecurityGroupId: str
"""
self.SecurityGroupId = None
def _deserialize(self, params):
self.SecurityGroupId = params.get("SecurityGroupId")
class DeleteSecurityGroupResponse(AbstractModel):
"""DeleteSecurityGroup返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteServiceTemplateGroupRequest(AbstractModel):
"""DeleteServiceTemplateGroup请求参数结构体
"""
def __init__(self):
"""
:param ServiceTemplateGroupId: 协议端口模板集合实例ID,例如:ppmg-n17uxvve。
:type ServiceTemplateGroupId: str
"""
self.ServiceTemplateGroupId = None
def _deserialize(self, params):
self.ServiceTemplateGroupId = params.get("ServiceTemplateGroupId")
class DeleteServiceTemplateGroupResponse(AbstractModel):
"""DeleteServiceTemplateGroup返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteServiceTemplateRequest(AbstractModel):
"""DeleteServiceTemplate请求参数结构体
"""
def __init__(self):
"""
:param ServiceTemplateId: 协议端口模板实例ID,例如:ppm-e6dy460g。
:type ServiceTemplateId: str
"""
self.ServiceTemplateId = None
def _deserialize(self, params):
self.ServiceTemplateId = params.get("ServiceTemplateId")
class DeleteServiceTemplateResponse(AbstractModel):
"""DeleteServiceTemplate返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteSubnetRequest(AbstractModel):
"""DeleteSubnet请求参数结构体
"""
def __init__(self):
"""
:param SubnetId: 子网实例ID。可通过DescribeSubnets接口返回值中的SubnetId获取。
:type SubnetId: str
"""
self.SubnetId = None
def _deserialize(self, params):
self.SubnetId = params.get("SubnetId")
class DeleteSubnetResponse(AbstractModel):
"""DeleteSubnet返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteVpcRequest(AbstractModel):
"""DeleteVpc请求参数结构体
"""
def __init__(self):
"""
:param VpcId: VPC实例ID。可通过DescribeVpcs接口返回值中的VpcId获取。
:type VpcId: str
"""
self.VpcId = None
def _deserialize(self, params):
self.VpcId = params.get("VpcId")
class DeleteVpcResponse(AbstractModel):
"""DeleteVpc返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteVpnConnectionRequest(AbstractModel):
"""DeleteVpnConnection请求参数结构体
"""
def __init__(self):
"""
:param VpnGatewayId: VPN网关实例ID。
:type VpnGatewayId: str
:param VpnConnectionId: VPN通道实例ID。形如:vpnx-f49l6u0z。
:type VpnConnectionId: str
"""
self.VpnGatewayId = None
self.VpnConnectionId = None
def _deserialize(self, params):
self.VpnGatewayId = params.get("VpnGatewayId")
self.VpnConnectionId = params.get("VpnConnectionId")
class DeleteVpnConnectionResponse(AbstractModel):
"""DeleteVpnConnection返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteVpnGatewayRequest(AbstractModel):
"""DeleteVpnGateway请求参数结构体
"""
def __init__(self):
"""
:param VpnGatewayId: VPN网关实例ID。
:type VpnGatewayId: str
"""
self.VpnGatewayId = None
def _deserialize(self, params):
self.VpnGatewayId = params.get("VpnGatewayId")
class DeleteVpnGatewayResponse(AbstractModel):
"""DeleteVpnGateway返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DescribeAccountAttributesRequest(AbstractModel):
"""DescribeAccountAttributes请求参数结构体
"""
class DescribeAccountAttributesResponse(AbstractModel):
"""DescribeAccountAttributes返回参数结构体
"""
def __init__(self):
"""
:param AccountAttributeSet: 用户账号属性对象
:type AccountAttributeSet: list of AccountAttribute
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.AccountAttributeSet = None
self.RequestId = None
def _deserialize(self, params):
if params.get("AccountAttributeSet") is not None:
self.AccountAttributeSet = []
for item in params.get("AccountAttributeSet"):
obj = AccountAttribute()
obj._deserialize(item)
self.AccountAttributeSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeAddressQuotaRequest(AbstractModel):
"""DescribeAddressQuota请求参数结构体
"""
class DescribeAddressQuotaResponse(AbstractModel):
"""DescribeAddressQuota返回参数结构体
"""
def __init__(self):
"""
:param QuotaSet: 账户 EIP 配额信息。
:type QuotaSet: list of Quota
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.QuotaSet = None
self.RequestId = None
def _deserialize(self, params):
if params.get("QuotaSet") is not None:
self.QuotaSet = []
for item in params.get("QuotaSet"):
obj = Quota()
obj._deserialize(item)
self.QuotaSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeAddressTemplateGroupsRequest(AbstractModel):
"""DescribeAddressTemplateGroups请求参数结构体
"""
def __init__(self):
"""
:param Filters: 过滤条件。
<li>address-template-group-name - String - (过滤条件)IP地址模板集合名称。</li>
<li>address-template-group-id - String - (过滤条件)IP地址模板实集合例ID,例如:ipmg-mdunqeb6。</li>
:type Filters: list of Filter
:param Offset: 偏移量,默认为0。
:type Offset: str
:param Limit: 返回数量,默认为20,最大值为100。
:type Limit: str
"""
self.Filters = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
if params.get("Filters") is not None:
self.Filters = []
for item in params.get("Filters"):
obj = Filter()
obj._deserialize(item)
self.Filters.append(obj)
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
class DescribeAddressTemplateGroupsResponse(AbstractModel):
"""DescribeAddressTemplateGroups返回参数结构体
"""
def __init__(self):
"""
:param TotalCount: 符合条件的实例数量。
:type TotalCount: int
:param AddressTemplateGroupSet: IP地址模板。
:type AddressTemplateGroupSet: list of AddressTemplateGroup
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.AddressTemplateGroupSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("AddressTemplateGroupSet") is not None:
self.AddressTemplateGroupSet = []
for item in params.get("AddressTemplateGroupSet"):
obj = AddressTemplateGroup()
obj._deserialize(item)
self.AddressTemplateGroupSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeAddressTemplatesRequest(AbstractModel):
"""DescribeAddressTemplates请求参数结构体
"""
def __init__(self):
"""
:param Filters: 过滤条件。
<li>address-template-name - String - (过滤条件)IP地址模板名称。</li>
<li>address-template-id - String - (过滤条件)IP地址模板实例ID,例如:ipm-mdunqeb6。</li>
:type Filters: list of Filter
:param Offset: 偏移量,默认为0。
:type Offset: str
:param Limit: 返回数量,默认为20,最大值为100。
:type Limit: str
"""
self.Filters = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
if params.get("Filters") is not None:
self.Filters = []
for item in params.get("Filters"):
obj = Filter()
obj._deserialize(item)
self.Filters.append(obj)
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
class DescribeAddressTemplatesResponse(AbstractModel):
"""DescribeAddressTemplates返回参数结构体
"""
def __init__(self):
"""
:param TotalCount: 符合条件的实例数量。
:type TotalCount: int
:param AddressTemplateSet: IP地址模版。
:type AddressTemplateSet: list of AddressTemplate
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.AddressTemplateSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("AddressTemplateSet") is not None:
self.AddressTemplateSet = []
for item in params.get("AddressTemplateSet"):
obj = AddressTemplate()
obj._deserialize(item)
self.AddressTemplateSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeAddressesRequest(AbstractModel):
"""DescribeAddresses请求参数结构体
"""
def __init__(self):
"""
:param AddressIds: 标识 EIP 的唯一 ID 列表。EIP 唯一 ID 形如:`eip-11112222`。参数不支持同时指定`AddressIds`和`Filters`。
:type AddressIds: list of str
:param Filters: 每次请求的`Filters`的上限为10,`Filter.Values`的上限为5。参数不支持同时指定`AddressIds`和`Filters`。详细的过滤条件如下:
<li> address-id - String - 是否必填:否 - (过滤条件)按照 EIP 的唯一 ID 过滤。EIP 唯一 ID 形如:eip-11112222。</li>
<li> address-name - String - 是否必填:否 - (过滤条件)按照 EIP 名称过滤。不支持模糊过滤。</li>
<li> address-ip - String - 是否必填:否 - (过滤条件)按照 EIP 的 IP 地址过滤。</li>
<li> address-status - String - 是否必填:否 - (过滤条件)按照 EIP 的状态过滤。取值范围:[详见EIP状态列表](https://cloud.tencent.com/document/api/213/9452#eip_state)。</li>
<li> instance-id - String - 是否必填:否 - (过滤条件)按照 EIP 绑定的实例 ID 过滤。实例 ID 形如:ins-11112222。</li>
<li> private-ip-address - String - 是否必填:否 - (过滤条件)按照 EIP 绑定的内网 IP 过滤。</li>
<li> network-interface-id - String - 是否必填:否 - (过滤条件)按照 EIP 绑定的弹性网卡 ID 过滤。弹性网卡 ID 形如:eni-11112222。</li>
<li> is-arrears - String - 是否必填:否 - (过滤条件)按照 EIP 是否欠费进行过滤。(TRUE:EIP 处于欠费状态|FALSE:EIP 费用状态正常)</li>
:type Filters: list of Filter
:param Offset: 偏移量,默认为0。关于`Offset`的更进一步介绍请参考 API [简介](https://cloud.tencent.com/document/api/213/11646)中的相关小节。
:type Offset: int
:param Limit: 返回数量,默认为20,最大值为100。关于`Limit`的更进一步介绍请参考 API [简介](https://cloud.tencent.com/document/api/213/11646)中的相关小节。
:type Limit: int
"""
self.AddressIds = None
self.Filters = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
self.AddressIds = params.get("AddressIds")
if params.get("Filters") is not None:
self.Filters = []
for item in params.get("Filters"):
obj = Filter()
obj._deserialize(item)
self.Filters.append(obj)
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
class DescribeAddressesResponse(AbstractModel):
"""DescribeAddresses返回参数结构体
"""
def __init__(self):
"""
:param TotalCount: 符合条件的 EIP 数量。
:type TotalCount: int
:param AddressSet: EIP 详细信息列表。
:type AddressSet: list of Address
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.AddressSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("AddressSet") is not None:
self.AddressSet = []
for item in params.get("AddressSet"):
obj = Address()
obj._deserialize(item)
self.AddressSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeClassicLinkInstancesRequest(AbstractModel):
"""DescribeClassicLinkInstances请求参数结构体
"""
def __init__(self):
"""
:param Filters: 过滤条件。
<li>vpc-id - String - (过滤条件)VPC实例ID。</li>
<li>vm-ip - String - (过滤条件)基础网络云主机IP。</li>
:type Filters: list of FilterObject
:param Offset: 偏移量
:type Offset: str
:param Limit: 返回数量
:type Limit: str
"""
self.Filters = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
if params.get("Filters") is not None:
self.Filters = []
for item in params.get("Filters"):
obj = FilterObject()
obj._deserialize(item)
self.Filters.append(obj)
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
class DescribeClassicLinkInstancesResponse(AbstractModel):
"""DescribeClassicLinkInstances返回参数结构体
"""
def __init__(self):
"""
:param TotalCount: 符合条件的实例数量。
:type TotalCount: int
:param ClassicLinkInstanceSet: 私有网络和基础网络互通设备。
:type ClassicLinkInstanceSet: list of ClassicLinkInstance
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.ClassicLinkInstanceSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("ClassicLinkInstanceSet") is not None:
self.ClassicLinkInstanceSet = []
for item in params.get("ClassicLinkInstanceSet"):
obj = ClassicLinkInstance()
obj._deserialize(item)
self.ClassicLinkInstanceSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeCustomerGatewayVendorsRequest(AbstractModel):
"""DescribeCustomerGatewayVendors请求参数结构体
"""
class DescribeCustomerGatewayVendorsResponse(AbstractModel):
"""DescribeCustomerGatewayVendors返回参数结构体
"""
def __init__(self):
"""
:param CustomerGatewayVendorSet: 对端网关厂商信息对象。
:type CustomerGatewayVendorSet: list of CustomerGatewayVendor
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.CustomerGatewayVendorSet = None
self.RequestId = None
def _deserialize(self, params):
if params.get("CustomerGatewayVendorSet") is not None:
self.CustomerGatewayVendorSet = []
for item in params.get("CustomerGatewayVendorSet"):
obj = CustomerGatewayVendor()
obj._deserialize(item)
self.CustomerGatewayVendorSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeCustomerGatewaysRequest(AbstractModel):
"""DescribeCustomerGateways请求参数结构体
"""
def __init__(self):
"""
:param CustomerGatewayIds: 对端网关ID,例如:cgw-2wqq41m9。每次请求的实例的上限为100。参数不支持同时指定CustomerGatewayIds和Filters。
:type CustomerGatewayIds: list of str
:param Filters: 过滤条件,详见下表:实例过滤条件表。每次请求的Filters的上限为10,Filter.Values的上限为5。参数不支持同时指定CustomerGatewayIds和Filters。
:type Filters: list of Filter
:param Offset: 偏移量,默认为0。关于Offset的更进一步介绍请参考 API 简介中的相关小节。
:type Offset: int
:param Limit: 返回数量,默认为20,最大值为100。
:type Limit: int
"""
self.CustomerGatewayIds = None
self.Filters = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
self.CustomerGatewayIds = params.get("CustomerGatewayIds")
if params.get("Filters") is not None:
self.Filters = []
for item in params.get("Filters"):
obj = Filter()
obj._deserialize(item)
self.Filters.append(obj)
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
class DescribeCustomerGatewaysResponse(AbstractModel):
"""DescribeCustomerGateways返回参数结构体
"""
def __init__(self):
"""
:param CustomerGatewaySet: 对端网关对象列表
:type CustomerGatewaySet: list of CustomerGateway
:param TotalCount: 符合条件的实例数量。
:type TotalCount: int
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.CustomerGatewaySet = None
self.TotalCount = None
self.RequestId = None
def _deserialize(self, params):
if params.get("CustomerGatewaySet") is not None:
self.CustomerGatewaySet = []
for item in params.get("CustomerGatewaySet"):
obj = CustomerGateway()
obj._deserialize(item)
self.CustomerGatewaySet.append(obj)
self.TotalCount = params.get("TotalCount")
self.RequestId = params.get("RequestId")
class DescribeNetworkInterfacesRequest(AbstractModel):
"""DescribeNetworkInterfaces请求参数结构体
"""
def __init__(self):
"""
:param NetworkInterfaceIds: 弹性网卡实例ID查询。形如:eni-pxir56ns。每次请求的实例的上限为100。参数不支持同时指定NetworkInterfaceIds和Filters。
:type NetworkInterfaceIds: list of str
:param Filters: 过滤条件,参数不支持同时指定NetworkInterfaceIds和Filters。
<li>vpc-id - String - (过滤条件)VPC实例ID,形如:vpc-f49l6u0z。</li>
<li>subnet-id - String - (过滤条件)所属子网实例ID,形如:subnet-f49l6u0z。</li>
<li>network-interface-id - String - (过滤条件)弹性网卡实例ID,形如:eni-5k56k7k7。</li>
<li>attachment.instance-id - String - (过滤条件)绑定的云服务器实例ID,形如:ins-3nqpdn3i。</li>
<li>groups.security-group-id - String - (过滤条件)绑定的安全组实例ID,例如:sg-f9ekbxeq。</li>
<li>network-interface-name - String - (过滤条件)网卡实例名称。</li>
<li>network-interface-description - String - (过滤条件)网卡实例描述。</li>
:type Filters: list of Filter
:param Offset: 偏移量,默认为0。
:type Offset: int
:param Limit: 返回数量,默认为20,最大值为100。
:type Limit: int
"""
self.NetworkInterfaceIds = None
self.Filters = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
self.NetworkInterfaceIds = params.get("NetworkInterfaceIds")
if params.get("Filters") is not None:
self.Filters = []
for item in params.get("Filters"):
obj = Filter()
obj._deserialize(item)
self.Filters.append(obj)
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
class DescribeNetworkInterfacesResponse(AbstractModel):
"""DescribeNetworkInterfaces返回参数结构体
"""
def __init__(self):
"""
:param NetworkInterfaceSet: 实例详细信息列表。
:type NetworkInterfaceSet: list of NetworkInterface
:param TotalCount: 符合条件的实例数量。
:type TotalCount: int
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.NetworkInterfaceSet = None
self.TotalCount = None
self.RequestId = None
def _deserialize(self, params):
if params.get("NetworkInterfaceSet") is not None:
self.NetworkInterfaceSet = []
for item in params.get("NetworkInterfaceSet"):
obj = NetworkInterface()
obj._deserialize(item)
self.NetworkInterfaceSet.append(obj)
self.TotalCount = params.get("TotalCount")
self.RequestId = params.get("RequestId")
class DescribeRouteTablesRequest(AbstractModel):
"""DescribeRouteTables请求参数结构体
"""
def __init__(self):
"""
:param RouteTableIds: 路由表实例ID,例如:rtb-azd4dt1c。
:type RouteTableIds: list of str
:param Filters: 过滤条件,参数不支持同时指定RouteTableIds和Filters。
<li>route-table-id - String - (过滤条件)路由表实例ID。</li>
<li>route-table-name - String - (过滤条件)路由表名称。</li>
<li>vpc-id - String - (过滤条件)VPC实例ID,形如:vpc-f49l6u0z。</li>
<li>association.main - String - (过滤条件)是否主路由表。</li>
:type Filters: list of Filter
:param Offset: 偏移量。
:type Offset: str
:param Limit: 请求对象个数。
:type Limit: str
"""
self.RouteTableIds = None
self.Filters = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
self.RouteTableIds = params.get("RouteTableIds")
if params.get("Filters") is not None:
self.Filters = []
for item in params.get("Filters"):
obj = Filter()
obj._deserialize(item)
self.Filters.append(obj)
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
class DescribeRouteTablesResponse(AbstractModel):
"""DescribeRouteTables返回参数结构体
"""
def __init__(self):
"""
:param TotalCount: 符合条件的实例数量。
:type TotalCount: int
:param RouteTableSet: 路由表对象。
:type RouteTableSet: list of RouteTable
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.RouteTableSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("RouteTableSet") is not None:
self.RouteTableSet = []
for item in params.get("RouteTableSet"):
obj = RouteTable()
obj._deserialize(item)
self.RouteTableSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeSecurityGroupAssociationStatisticsRequest(AbstractModel):
"""DescribeSecurityGroupAssociationStatistics请求参数结构体
"""
def __init__(self):
"""
:param SecurityGroupIds: 安全实例ID,例如sg-33ocnj9n,可通过DescribeSecurityGroups获取。
:type SecurityGroupIds: list of str
"""
self.SecurityGroupIds = None
def _deserialize(self, params):
self.SecurityGroupIds = params.get("SecurityGroupIds")
class DescribeSecurityGroupAssociationStatisticsResponse(AbstractModel):
"""DescribeSecurityGroupAssociationStatistics返回参数结构体
"""
def __init__(self):
"""
:param SecurityGroupAssociationStatisticsSet: 安全组关联实例统计。
:type SecurityGroupAssociationStatisticsSet: list of SecurityGroupAssociationStatistics
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.SecurityGroupAssociationStatisticsSet = None
self.RequestId = None
def _deserialize(self, params):
if params.get("SecurityGroupAssociationStatisticsSet") is not None:
self.SecurityGroupAssociationStatisticsSet = []
for item in params.get("SecurityGroupAssociationStatisticsSet"):
obj = SecurityGroupAssociationStatistics()
obj._deserialize(item)
self.SecurityGroupAssociationStatisticsSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeSecurityGroupPoliciesRequest(AbstractModel):
"""DescribeSecurityGroupPolicies请求参数结构体
"""
def __init__(self):
"""
:param SecurityGroupId: 安全组实例ID,例如:sg-33ocnj9n,可通过DescribeSecurityGroups获取。
:type SecurityGroupId: str
"""
self.SecurityGroupId = None
def _deserialize(self, params):
self.SecurityGroupId = params.get("SecurityGroupId")
class DescribeSecurityGroupPoliciesResponse(AbstractModel):
"""DescribeSecurityGroupPolicies返回参数结构体
"""
def __init__(self):
"""
:param SecurityGroupPolicySet: 安全组规则集合。
:type SecurityGroupPolicySet: :class:`tencentcloud.vpc.v20170312.models.SecurityGroupPolicySet`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.SecurityGroupPolicySet = None
self.RequestId = None
def _deserialize(self, params):
if params.get("SecurityGroupPolicySet") is not None:
self.SecurityGroupPolicySet = SecurityGroupPolicySet()
self.SecurityGroupPolicySet._deserialize(params.get("SecurityGroupPolicySet"))
self.RequestId = params.get("RequestId")
class DescribeSecurityGroupsRequest(AbstractModel):
"""DescribeSecurityGroups请求参数结构体
"""
def __init__(self):
"""
:param SecurityGroupIds: 安全组实例ID,例如:sg-33ocnj9n,可通过DescribeSecurityGroups获取。每次请求的实例的上限为100。参数不支持同时指定SecurityGroupIds和Filters。
:type SecurityGroupIds: list of str
:param Filters: 过滤条件,参数不支持同时指定SecurityGroupIds和Filters。
<li>project-id - Integer - (过滤条件)项目id。</li>
<li>security-group-name - String - (过滤条件)安全组名称。</li>
:type Filters: list of Filter
:param Offset: 偏移量。
:type Offset: str
:param Limit: 返回数量。
:type Limit: str
"""
self.SecurityGroupIds = None
self.Filters = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
self.SecurityGroupIds = params.get("SecurityGroupIds")
if params.get("Filters") is not None:
self.Filters = []
for item in params.get("Filters"):
obj = Filter()
obj._deserialize(item)
self.Filters.append(obj)
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
class DescribeSecurityGroupsResponse(AbstractModel):
"""DescribeSecurityGroups返回参数结构体
"""
def __init__(self):
"""
:param SecurityGroupSet: 安全组对象。
:type SecurityGroupSet: list of SecurityGroup
:param TotalCount: 符合条件的实例数量。
:type TotalCount: int
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.SecurityGroupSet = None
self.TotalCount = None
self.RequestId = None
def _deserialize(self, params):
if params.get("SecurityGroupSet") is not None:
self.SecurityGroupSet = []
for item in params.get("SecurityGroupSet"):
obj = SecurityGroup()
obj._deserialize(item)
self.SecurityGroupSet.append(obj)
self.TotalCount = params.get("TotalCount")
self.RequestId = params.get("RequestId")
class DescribeServiceTemplateGroupsRequest(AbstractModel):
"""DescribeServiceTemplateGroups请求参数结构体
"""
def __init__(self):
"""
:param Filters: 过滤条件。
<li>service-template-group-name - String - (过滤条件)协议端口模板集合名称。</li>
<li>service-template-group-id - String - (过滤条件)协议端口模板集合实例ID,例如:ppmg-e6dy460g。</li>
:type Filters: list of Filter
:param Offset: 偏移量,默认为0。
:type Offset: str
:param Limit: 返回数量,默认为20,最大值为100。
:type Limit: str
"""
self.Filters = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
if params.get("Filters") is not None:
self.Filters = []
for item in params.get("Filters"):
obj = Filter()
obj._deserialize(item)
self.Filters.append(obj)
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
class DescribeServiceTemplateGroupsResponse(AbstractModel):
"""DescribeServiceTemplateGroups返回参数结构体
"""
def __init__(self):
"""
:param TotalCount: 符合条件的实例数量。
:type TotalCount: int
:param ServiceTemplateGroupSet: 协议端口模板集合。
:type ServiceTemplateGroupSet: list of ServiceTemplateGroup
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.ServiceTemplateGroupSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("ServiceTemplateGroupSet") is not None:
self.ServiceTemplateGroupSet = []
for item in params.get("ServiceTemplateGroupSet"):
obj = ServiceTemplateGroup()
obj._deserialize(item)
self.ServiceTemplateGroupSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeServiceTemplatesRequest(AbstractModel):
"""DescribeServiceTemplates请求参数结构体
"""
def __init__(self):
"""
:param Filters: 过滤条件。
<li>service-template-name - String - (过滤条件)协议端口模板名称。</li>
<li>service-template-id - String - (过滤条件)协议端口模板实例ID,例如:ppm-e6dy460g。</li>
:type Filters: list of Filter
:param Offset: 偏移量,默认为0。
:type Offset: str
:param Limit: 返回数量,默认为20,最大值为100。
:type Limit: str
"""
self.Filters = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
if params.get("Filters") is not None:
self.Filters = []
for item in params.get("Filters"):
obj = Filter()
obj._deserialize(item)
self.Filters.append(obj)
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
class DescribeServiceTemplatesResponse(AbstractModel):
"""DescribeServiceTemplates返回参数结构体
"""
def __init__(self):
"""
:param TotalCount: 符合条件的实例数量。
:type TotalCount: int
:param ServiceTemplateSet: 协议端口模板对象。
:type ServiceTemplateSet: list of ServiceTemplate
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.ServiceTemplateSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("ServiceTemplateSet") is not None:
self.ServiceTemplateSet = []
for item in params.get("ServiceTemplateSet"):
obj = ServiceTemplate()
obj._deserialize(item)
self.ServiceTemplateSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeSubnetsRequest(AbstractModel):
"""DescribeSubnets请求参数结构体
"""
def __init__(self):
"""
:param SubnetIds: 子网实例ID查询。形如:subnet-pxir56ns。每次请求的实例的上限为100。参数不支持同时指定SubnetIds和Filters。
:type SubnetIds: list of str
:param Filters: 过滤条件,参数不支持同时指定SubnetIds和Filters。
<li>subnet-id - String - (过滤条件)Subnet实例名称。</li>
<li>vpc-id - String - (过滤条件)VPC实例ID,形如:vpc-f49l6u0z。</li>
<li>cidr-block - String - (过滤条件)vpc的cidr。</li>
<li>is-default - Boolean - (过滤条件)是否是默认子网。</li>
<li>subnet-name - String - (过滤条件)子网名称。</li>
<li>zone - String - (过滤条件)可用区。</li>
:type Filters: list of Filter
:param Offset: 偏移量
:type Offset: str
:param Limit: 返回数量
:type Limit: str
"""
self.SubnetIds = None
self.Filters = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
self.SubnetIds = params.get("SubnetIds")
if params.get("Filters") is not None:
self.Filters = []
for item in params.get("Filters"):
obj = Filter()
obj._deserialize(item)
self.Filters.append(obj)
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
class DescribeSubnetsResponse(AbstractModel):
"""DescribeSubnets返回参数结构体
"""
def __init__(self):
"""
:param TotalCount: 符合条件的实例数量。
:type TotalCount: int
:param SubnetSet: 子网对象。
:type SubnetSet: list of Subnet
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.SubnetSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("SubnetSet") is not None:
self.SubnetSet = []
for item in params.get("SubnetSet"):
obj = Subnet()
obj._deserialize(item)
self.SubnetSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeVpcsRequest(AbstractModel):
"""DescribeVpcs请求参数结构体
"""
def __init__(self):
"""
:param VpcIds: VPC实例ID。形如:vpc-f49l6u0z。每次请求的实例的上限为100。参数不支持同时指定VpcIds和Filters。
:type VpcIds: list of str
:param Filters: 过滤条件,参数不支持同时指定VpcIds和Filters。
<li>vpc-name - String - (过滤条件)VPC实例名称。</li>
<li>is-default - String - (过滤条件)是否默认VPC。</li>
<li>vpc-id - String - (过滤条件)VPC实例ID形如:vpc-f49l6u0z。</li>
<li>cidr-block - String - (过滤条件)vpc的cidr。</li>
:type Filters: list of Filter
:param Offset: 偏移量
:type Offset: str
:param Limit: 返回数量
:type Limit: str
"""
self.VpcIds = None
self.Filters = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
self.VpcIds = params.get("VpcIds")
if params.get("Filters") is not None:
self.Filters = []
for item in params.get("Filters"):
obj = Filter()
obj._deserialize(item)
self.Filters.append(obj)
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
class DescribeVpcsResponse(AbstractModel):
"""DescribeVpcs返回参数结构体
"""
def __init__(self):
"""
:param TotalCount: 符合条件的对象数。
:type TotalCount: int
:param VpcSet: VPC对象。
:type VpcSet: list of Vpc
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.VpcSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("VpcSet") is not None:
self.VpcSet = []
for item in params.get("VpcSet"):
obj = Vpc()
obj._deserialize(item)
self.VpcSet.append(obj)
self.RequestId = params.get("RequestId")
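# Illustrative sketch (not part of the generated models): paging through
# DescribeVpcs with Offset/Limit (strings in this API version) until
# TotalCount is exhausted. The `client` argument is assumed to be a
# VpcClient from the accompanying vpc_client module; the helper name is
# hypothetical.
def _example_list_all_vpcs(client):
    """Collect every Vpc object across paginated DescribeVpcs calls."""
    vpcs, offset, limit = [], 0, 20
    while True:
        req = DescribeVpcsRequest()
        req.Offset = str(offset)
        req.Limit = str(limit)
        resp = client.DescribeVpcs(req)   # assumed client method name
        vpcs.extend(resp.VpcSet or [])
        offset += limit
        if offset >= resp.TotalCount:
            return vpcs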
class DescribeVpnConnectionsRequest(AbstractModel):
"""DescribeVpnConnections请求参数结构体
"""
def __init__(self):
"""
:param VpnConnectionIds: VPN通道实例ID。形如:vpnx-f49l6u0z。每次请求的实例的上限为100。参数不支持同时指定VpnConnectionIds和Filters。
:type VpnConnectionIds: list of str
:param Filters: 过滤条件,详见下表:实例过滤条件表。每次请求的Filters的上限为10,Filter.Values的上限为5。参数不支持同时指定VpnConnectionIds和Filters。
:type Filters: list of Filter
:param Offset: 偏移量,默认为0。关于Offset的更进一步介绍请参考 API 简介中的相关小节。
:type Offset: int
:param Limit: 返回数量,默认为20,最大值为100。
:type Limit: int
"""
self.VpnConnectionIds = None
self.Filters = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
self.VpnConnectionIds = params.get("VpnConnectionIds")
if params.get("Filters") is not None:
self.Filters = []
for item in params.get("Filters"):
obj = Filter()
obj._deserialize(item)
self.Filters.append(obj)
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
class DescribeVpnConnectionsResponse(AbstractModel):
"""DescribeVpnConnections返回参数结构体
"""
def __init__(self):
"""
:param TotalCount: 符合条件的实例数量。
:type TotalCount: int
:param VpnConnectionSet: VPN通道实例。
:type VpnConnectionSet: list of VpnConnection
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.VpnConnectionSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("VpnConnectionSet") is not None:
self.VpnConnectionSet = []
for item in params.get("VpnConnectionSet"):
obj = VpnConnection()
obj._deserialize(item)
self.VpnConnectionSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeVpnGatewaysRequest(AbstractModel):
"""DescribeVpnGateways请求参数结构体
"""
def __init__(self):
"""
:param VpnGatewayIds: VPN网关实例ID。形如:vpngw-f49l6u0z。每次请求的实例的上限为100。参数不支持同时指定VpnGatewayIds和Filters。
:type VpnGatewayIds: list of str
:param Filters: 过滤器对象属性
:type Filters: list of FilterObject
:param Offset: 偏移量
:type Offset: int
:param Limit: 请求对象个数
:type Limit: int
"""
self.VpnGatewayIds = None
self.Filters = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
self.VpnGatewayIds = params.get("VpnGatewayIds")
if params.get("Filters") is not None:
self.Filters = []
for item in params.get("Filters"):
obj = FilterObject()
obj._deserialize(item)
self.Filters.append(obj)
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
class DescribeVpnGatewaysResponse(AbstractModel):
"""DescribeVpnGateways返回参数结构体
"""
def __init__(self):
"""
:param TotalCount: 符合条件的实例数量。
:type TotalCount: int
:param VpnGatewaySet: VPN网关实例详细信息列表。
:type VpnGatewaySet: list of VpnGateway
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.VpnGatewaySet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("VpnGatewaySet") is not None:
self.VpnGatewaySet = []
for item in params.get("VpnGatewaySet"):
obj = VpnGateway()
obj._deserialize(item)
self.VpnGatewaySet.append(obj)
self.RequestId = params.get("RequestId")
class DetachClassicLinkVpcRequest(AbstractModel):
"""DetachClassicLinkVpc请求参数结构体
"""
def __init__(self):
"""
:param VpcId: VPC实例ID。可通过DescribeVpcs接口返回值中的VpcId获取。
:type VpcId: str
:param InstanceIds: CVM实例ID查询。形如:ins-r8hr2upy。
:type InstanceIds: list of str
"""
self.VpcId = None
self.InstanceIds = None
def _deserialize(self, params):
self.VpcId = params.get("VpcId")
self.InstanceIds = params.get("InstanceIds")
class DetachClassicLinkVpcResponse(AbstractModel):
"""DetachClassicLinkVpc返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DetachNetworkInterfaceRequest(AbstractModel):
"""DetachNetworkInterface请求参数结构体
"""
def __init__(self):
"""
:param NetworkInterfaceId: 弹性网卡实例ID,例如:eni-m6dyj72l。
:type NetworkInterfaceId: str
:param InstanceId: CVM实例ID。形如:ins-r8hr2upy。
:type InstanceId: str
"""
self.NetworkInterfaceId = None
self.InstanceId = None
def _deserialize(self, params):
self.NetworkInterfaceId = params.get("NetworkInterfaceId")
self.InstanceId = params.get("InstanceId")
class DetachNetworkInterfaceResponse(AbstractModel):
"""DetachNetworkInterface返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DisassociateAddressRequest(AbstractModel):
"""DisassociateAddress请求参数结构体
"""
def __init__(self):
"""
:param AddressId: 标识 EIP 的唯一 ID。EIP 唯一 ID 形如:`eip-11112222`。
:type AddressId: str
:param ReallocateNormalPublicIp: 表示解绑 EIP 之后是否分配普通公网 IP。取值范围:<br><li>TRUE:表示解绑 EIP 之后分配普通公网 IP。<br><li>FALSE:表示解绑 EIP 之后不分配普通公网 IP。<br>默认取值:FALSE。<br><br>只有满足以下条件时才能指定该参数:<br><li> 只有在解绑主网卡的主内网 IP 上的 EIP 时才能指定该参数。<br><li>解绑 EIP 后重新分配普通公网 IP 操作一个账号每天最多操作 10 次;详情可通过 [DescribeAddressQuota](https://cloud.tencent.com/document/api/213/1378) 接口获取。
:type ReallocateNormalPublicIp: bool
"""
self.AddressId = None
self.ReallocateNormalPublicIp = None
def _deserialize(self, params):
self.AddressId = params.get("AddressId")
self.ReallocateNormalPublicIp = params.get("ReallocateNormalPublicIp")
class DisassociateAddressResponse(AbstractModel):
"""DisassociateAddress返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
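# Illustrative sketch (not part of the generated models): unbinding an EIP
# and optionally asking for an ordinary public IP in its place. The client
# method name is assumed from the SDK's usual request/response pairing.
def _example_disassociate_eip(client, address_id, reallocate=False):
    req = DisassociateAddressRequest()
    req.AddressId = address_id               # e.g. "eip-11112222"
    # Only meaningful when unbinding from a primary ENI's primary private IP;
    # reallocation is capped at 10 times per account per day.
    req.ReallocateNormalPublicIp = reallocate
    return client.DisassociateAddress(req)   # assumed client method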
class DownloadCustomerGatewayConfigurationRequest(AbstractModel):
"""DownloadCustomerGatewayConfiguration请求参数结构体
"""
def __init__(self):
"""
:param VpnGatewayId: VPN网关实例ID。
:type VpnGatewayId: str
:param VpnConnectionId: VPN通道实例ID。形如:vpnx-f49l6u0z。
:type VpnConnectionId: str
:param CustomerGatewayVendor: 对端网关厂商信息对象,可通过DescribeCustomerGatewayVendors获取。
:type CustomerGatewayVendor: :class:`tencentcloud.vpc.v20170312.models.CustomerGatewayVendor`
:param InterfaceName: 通道接入设备物理接口名称。
:type InterfaceName: str
"""
self.VpnGatewayId = None
self.VpnConnectionId = None
self.CustomerGatewayVendor = None
self.InterfaceName = None
def _deserialize(self, params):
self.VpnGatewayId = params.get("VpnGatewayId")
self.VpnConnectionId = params.get("VpnConnectionId")
if params.get("CustomerGatewayVendor") is not None:
self.CustomerGatewayVendor = CustomerGatewayVendor()
self.CustomerGatewayVendor._deserialize(params.get("CustomerGatewayVendor"))
self.InterfaceName = params.get("InterfaceName")
class DownloadCustomerGatewayConfigurationResponse(AbstractModel):
"""DownloadCustomerGatewayConfiguration返回参数结构体
"""
def __init__(self):
"""
:param CustomerGatewayConfiguration: XML格式配置信息。
:type CustomerGatewayConfiguration: str
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.CustomerGatewayConfiguration = None
self.RequestId = None
def _deserialize(self, params):
self.CustomerGatewayConfiguration = params.get("CustomerGatewayConfiguration")
self.RequestId = params.get("RequestId")
class Filter(AbstractModel):
"""过滤器
"""
def __init__(self):
"""
:param Name: 属性名称, 若存在多个Filter时,Filter间的关系为逻辑与(AND)关系。
:type Name: str
:param Values: 属性值, 若同一个Filter存在多个Values,同一Filter下Values间的关系为逻辑或(OR)关系。
:type Values: list of str
"""
self.Name = None
self.Values = None
def _deserialize(self, params):
self.Name = params.get("Name")
self.Values = params.get("Values")
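# Illustrative sketch (not part of the generated models): Filter semantics
# as documented above -- multiple Filters combine with AND, while multiple
# Values inside one Filter combine with OR. Sample names/values are
# hypothetical.
def _example_filters():
    """Match VPCs named "prod" OR "staging" AND that are not the default."""
    name_f = Filter()
    name_f.Name = "vpc-name"
    name_f.Values = ["prod", "staging"]  # OR within one Filter
    default_f = Filter()
    default_f.Name = "is-default"
    default_f.Values = ["false"]
    return [name_f, default_f]           # AND across Filters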
class FilterObject(AbstractModel):
"""过滤器键值对
"""
def __init__(self):
"""
:param Name: 属性名称, 若存在多个Filter时,Filter间的关系为逻辑与(AND)关系。
:type Name: str
:param Values: 属性值, 若同一个Filter存在多个Values,同一Filter下Values间的关系为逻辑或(OR)关系。
:type Values: list of str
"""
self.Name = None
self.Values = None
def _deserialize(self, params):
self.Name = params.get("Name")
self.Values = params.get("Values")
class IKEOptionsSpecification(AbstractModel):
"""IKE配置(Internet Key Exchange,因特网密钥交换),IKE具有一套自保护机制,用户配置网络安全协议
"""
def __init__(self):
"""
:param PropoEncryAlgorithm: 加密算法,可选值:'3DES-CBC', 'AES-CBC-128', 'AES-CBS-192', 'AES-CBC-256', 'DES-CBC',默认为3DES-CBC
:type PropoEncryAlgorithm: str
:param PropoAuthenAlgorithm: 认证算法:可选值:'MD5', 'SHA1',默认为MD5
:type PropoAuthenAlgorithm: str
:param ExchangeMode: 协商模式:可选值:'AGGRESSIVE', 'MAIN',默认为MAIN
:type ExchangeMode: str
:param LocalIdentity: 本端标识类型:可选值:'ADDRESS', 'FQDN',默认为ADDRESS
:type LocalIdentity: str
:param RemoteIdentity: 对端标识类型:可选值:'ADDRESS', 'FQDN',默认为ADDRESS
:type RemoteIdentity: str
:param LocalAddress: 本端标识,当LocalIdentity选为ADDRESS时,LocalAddress必填。localAddress默认为vpn网关公网IP
:type LocalAddress: str
:param RemoteAddress: 对端标识,当RemoteIdentity选为ADDRESS时,RemoteAddress必填
:type RemoteAddress: str
:param LocalFqdnName: 本端标识,当LocalIdentity选为FQDN时,LocalFqdnName必填
:type LocalFqdnName: str
:param RemoteFqdnName: 对端标识,当remoteIdentity选为FQDN时,RemoteFqdnName必填
:type RemoteFqdnName: str
:param DhGroupName: DH group,指定IKE交换密钥时使用的DH组,可选值:'GROUP1', 'GROUP2', 'GROUP5', 'GROUP14', 'GROUP24',
:type DhGroupName: str
:param IKESaLifetimeSeconds: IKE SA Lifetime,单位:秒,设置IKE SA的生存周期,取值范围:60-604800
:type IKESaLifetimeSeconds: int
:param IKEVersion: IKE版本
:type IKEVersion: str
"""
self.PropoEncryAlgorithm = None
self.PropoAuthenAlgorithm = None
self.ExchangeMode = None
self.LocalIdentity = None
self.RemoteIdentity = None
self.LocalAddress = None
self.RemoteAddress = None
self.LocalFqdnName = None
self.RemoteFqdnName = None
self.DhGroupName = None
self.IKESaLifetimeSeconds = None
self.IKEVersion = None
def _deserialize(self, params):
self.PropoEncryAlgorithm = params.get("PropoEncryAlgorithm")
self.PropoAuthenAlgorithm = params.get("PropoAuthenAlgorithm")
self.ExchangeMode = params.get("ExchangeMode")
self.LocalIdentity = params.get("LocalIdentity")
self.RemoteIdentity = params.get("RemoteIdentity")
self.LocalAddress = params.get("LocalAddress")
self.RemoteAddress = params.get("RemoteAddress")
self.LocalFqdnName = params.get("LocalFqdnName")
self.RemoteFqdnName = params.get("RemoteFqdnName")
self.DhGroupName = params.get("DhGroupName")
self.IKESaLifetimeSeconds = params.get("IKESaLifetimeSeconds")
self.IKEVersion = params.get("IKEVersion")
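# Illustrative sketch (not part of the generated models): filling in IKE
# options with values taken from the value ranges documented above. The
# IKEVersion string and the peer address are assumptions; check the API
# documentation for the exact accepted values.
def _example_ike_options():
    ike = IKEOptionsSpecification()
    ike.PropoEncryAlgorithm = "AES-CBC-128"  # default would be 3DES-CBC
    ike.PropoAuthenAlgorithm = "SHA1"        # default would be MD5
    ike.ExchangeMode = "MAIN"
    ike.LocalIdentity = "ADDRESS"            # LocalAddress then defaults to
    ike.RemoteIdentity = "ADDRESS"           # the VPN gateway's public IP
    ike.RemoteAddress = "203.0.113.10"       # hypothetical peer address
    ike.DhGroupName = "GROUP2"
    ike.IKESaLifetimeSeconds = 86400         # within the 60-604800 range
    ike.IKEVersion = "IKEV1"                 # assumed value
    return ike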
class IPSECOptionsSpecification(AbstractModel):
"""IPSec配置,腾讯云提供IPSec安全会话设置
"""
def __init__(self):
"""
:param EncryptAlgorithm: 加密算法,可选值:'3DES-CBC', 'AES-CBC-128', 'AES-CBC-192', 'AES-CBC-256', 'DES-CBC', 'NULL', 默认为AES-CBC-128
:type EncryptAlgorithm: str
:param IntegrityAlgorith: 认证算法:可选值:'MD5', 'SHA1',默认为
:type IntegrityAlgorith: str
:param IPSECSaLifetimeSeconds: IPsec SA lifetime(s):单位秒,取值范围:180-604800
:type IPSECSaLifetimeSeconds: int
:param PfsDhGroup: PFS:可选值:'NULL', 'DH-GROUP1', 'DH-GROUP2', 'DH-GROUP5', 'DH-GROUP14', 'DH-GROUP24',默认为NULL
:type PfsDhGroup: str
:param IPSECSaLifetimeTraffic: IPsec SA lifetime(KB):单位KB,取值范围:2560-604800
:type IPSECSaLifetimeTraffic: int
"""
self.EncryptAlgorithm = None
self.IntegrityAlgorith = None
self.IPSECSaLifetimeSeconds = None
self.PfsDhGroup = None
self.IPSECSaLifetimeTraffic = None
def _deserialize(self, params):
self.EncryptAlgorithm = params.get("EncryptAlgorithm")
self.IntegrityAlgorith = params.get("IntegrityAlgorith")
self.IPSECSaLifetimeSeconds = params.get("IPSECSaLifetimeSeconds")
self.PfsDhGroup = params.get("PfsDhGroup")
self.IPSECSaLifetimeTraffic = params.get("IPSECSaLifetimeTraffic")
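# Illustrative sketch (not part of the generated models): IPSec options
# chosen from within the documented value ranges above.
def _example_ipsec_options():
    ipsec = IPSECOptionsSpecification()
    ipsec.EncryptAlgorithm = "AES-CBC-128"  # the documented default
    ipsec.IntegrityAlgorith = "SHA1"        # field name sic in the API
    ipsec.IPSECSaLifetimeSeconds = 3600     # within 180-604800 seconds
    ipsec.PfsDhGroup = "NULL"               # PFS disabled
    ipsec.IPSECSaLifetimeTraffic = 102400   # KB, within 2560-604800
    return ipsec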
class InquiryPriceCreateVpnGatewayRequest(AbstractModel):
"""InquiryPriceCreateVpnGateway请求参数结构体
"""
def __init__(self):
"""
:param InternetMaxBandwidthOut: 公网带宽设置。可选带宽规格:5, 10, 20, 50, 100;单位:Mbps。
:type InternetMaxBandwidthOut: int
:param InstanceChargeType: VPN网关计费模式,PREPAID:表示预付费,即包年包月,POSTPAID_BY_HOUR:表示后付费,即按量计费。默认:POSTPAID_BY_HOUR,如果指定预付费模式,参数InstanceChargePrepaid必填。
:type InstanceChargeType: str
:param InstanceChargePrepaid: 预付费模式,即包年包月相关参数设置。通过该参数可以指定包年包月实例的购买时长、是否设置自动续费等属性。若指定实例的付费模式为预付费则该参数必传。
:type InstanceChargePrepaid: :class:`tencentcloud.vpc.v20170312.models.InstanceChargePrepaid`
"""
self.InternetMaxBandwidthOut = None
self.InstanceChargeType = None
self.InstanceChargePrepaid = None
def _deserialize(self, params):
self.InternetMaxBandwidthOut = params.get("InternetMaxBandwidthOut")
self.InstanceChargeType = params.get("InstanceChargeType")
if params.get("InstanceChargePrepaid") is not None:
self.InstanceChargePrepaid = InstanceChargePrepaid()
self.InstanceChargePrepaid._deserialize(params.get("InstanceChargePrepaid"))
class InquiryPriceCreateVpnGatewayResponse(AbstractModel):
"""InquiryPriceCreateVpnGateway返回参数结构体
"""
def __init__(self):
"""
:param Price: 商品价格。
:type Price: :class:`tencentcloud.vpc.v20170312.models.Price`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.Price = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Price") is not None:
self.Price = Price()
self.Price._deserialize(params.get("Price"))
self.RequestId = params.get("RequestId")
class InquiryPriceRenewVpnGatewayRequest(AbstractModel):
"""InquiryPriceRenewVpnGateway请求参数结构体
"""
def __init__(self):
"""
:param VpnGatewayId: VPN网关实例ID。
:type VpnGatewayId: str
:param InstanceChargePrepaid: 预付费模式,即包年包月相关参数设置。通过该参数可以指定包年包月实例的购买时长、是否设置自动续费等属性。若指定实例的付费模式为预付费则该参数必传。
:type InstanceChargePrepaid: :class:`tencentcloud.vpc.v20170312.models.InstanceChargePrepaid`
"""
self.VpnGatewayId = None
self.InstanceChargePrepaid = None
def _deserialize(self, params):
self.VpnGatewayId = params.get("VpnGatewayId")
if params.get("InstanceChargePrepaid") is not None:
self.InstanceChargePrepaid = InstanceChargePrepaid()
self.InstanceChargePrepaid._deserialize(params.get("InstanceChargePrepaid"))
class InquiryPriceRenewVpnGatewayResponse(AbstractModel):
"""InquiryPriceRenewVpnGateway返回参数结构体
"""
def __init__(self):
"""
:param Price: 商品价格。
:type Price: :class:`tencentcloud.vpc.v20170312.models.Price`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.Price = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Price") is not None:
self.Price = Price()
self.Price._deserialize(params.get("Price"))
self.RequestId = params.get("RequestId")
class InquiryPriceResetVpnGatewayInternetMaxBandwidthRequest(AbstractModel):
"""InquiryPriceResetVpnGatewayInternetMaxBandwidth请求参数结构体
"""
def __init__(self):
"""
:param VpnGatewayId: VPN网关实例ID。
:type VpnGatewayId: str
:param InternetMaxBandwidthOut: 公网带宽设置。可选带宽规格:5, 10, 20, 50, 100;单位:Mbps。
:type InternetMaxBandwidthOut: int
"""
self.VpnGatewayId = None
self.InternetMaxBandwidthOut = None
def _deserialize(self, params):
self.VpnGatewayId = params.get("VpnGatewayId")
self.InternetMaxBandwidthOut = params.get("InternetMaxBandwidthOut")
class InquiryPriceResetVpnGatewayInternetMaxBandwidthResponse(AbstractModel):
"""InquiryPriceResetVpnGatewayInternetMaxBandwidth返回参数结构体
"""
def __init__(self):
"""
:param Price: 商品价格。
:type Price: :class:`tencentcloud.vpc.v20170312.models.Price`
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.Price = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Price") is not None:
self.Price = Price()
self.Price._deserialize(params.get("Price"))
self.RequestId = params.get("RequestId")
class InstanceChargePrepaid(AbstractModel):
"""预付费(包年包月)计费对象。
"""
def __init__(self):
"""
:param Period: 购买实例的时长,单位:月。取值范围:1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 24, 36。
:type Period: int
:param RenewFlag: 自动续费标识。取值范围: NOTIFY_AND_AUTO_RENEW:通知过期且自动续费, NOTIFY_AND_MANUAL_RENEW:通知过期不自动续费。默认:NOTIFY_AND_MANUAL_RENEW
:type RenewFlag: str
"""
self.Period = None
self.RenewFlag = None
def _deserialize(self, params):
self.Period = params.get("Period")
self.RenewFlag = params.get("RenewFlag")
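# Illustrative sketch (not part of the generated models): a prepaid price
# inquiry. InstanceChargePrepaid is required exactly when InstanceChargeType
# is PREPAID, as the docstrings above state.
def _example_prepaid_vpn_gateway_inquiry():
    prepaid = InstanceChargePrepaid()
    prepaid.Period = 12                          # months
    prepaid.RenewFlag = "NOTIFY_AND_AUTO_RENEW"
    req = InquiryPriceCreateVpnGatewayRequest()
    req.InternetMaxBandwidthOut = 10             # Mbps: 5/10/20/50/100
    req.InstanceChargeType = "PREPAID"
    req.InstanceChargePrepaid = prepaid          # required for PREPAID
    return req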
class ItemPrice(AbstractModel):
"""单项计费价格信息
"""
def __init__(self):
"""
:param UnitPrice: 按量计费后付费单价,单位:元。
:type UnitPrice: float
:param ChargeUnit: 按量计费后付费计价单元,可取值范围: HOUR:表示计价单元是按每小时来计算。当前涉及该计价单元的场景有:实例按小时后付费(POSTPAID_BY_HOUR)、带宽按小时后付费(BANDWIDTH_POSTPAID_BY_HOUR): GB:表示计价单元是按每GB来计算。当前涉及该计价单元的场景有:流量按小时后付费(TRAFFIC_POSTPAID_BY_HOUR)。
:type ChargeUnit: str
:param OriginalPrice: 预付费商品的原价,单位:元。
:type OriginalPrice: float
:param DiscountPrice: 预付费商品的折扣价,单位:元。
:type DiscountPrice: float
"""
self.UnitPrice = None
self.ChargeUnit = None
self.OriginalPrice = None
self.DiscountPrice = None
def _deserialize(self, params):
self.UnitPrice = params.get("UnitPrice")
self.ChargeUnit = params.get("ChargeUnit")
self.OriginalPrice = params.get("OriginalPrice")
self.DiscountPrice = params.get("DiscountPrice")
class MigrateNetworkInterfaceRequest(AbstractModel):
"""MigrateNetworkInterface请求参数结构体
"""
def __init__(self):
"""
:param NetworkInterfaceId: 弹性网卡实例ID,例如:eni-m6dyj72l。
:type NetworkInterfaceId: str
:param SourceInstanceId: 弹性网卡当前绑定的CVM实例ID。形如:ins-r8hr2upy。
:type SourceInstanceId: str
:param DestinationInstanceId: 待迁移的目的CVM实例ID。
:type DestinationInstanceId: str
"""
self.NetworkInterfaceId = None
self.SourceInstanceId = None
self.DestinationInstanceId = None
def _deserialize(self, params):
self.NetworkInterfaceId = params.get("NetworkInterfaceId")
self.SourceInstanceId = params.get("SourceInstanceId")
self.DestinationInstanceId = params.get("DestinationInstanceId")
class MigrateNetworkInterfaceResponse(AbstractModel):
"""MigrateNetworkInterface返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class MigratePrivateIpAddressRequest(AbstractModel):
"""MigratePrivateIpAddress请求参数结构体
"""
def __init__(self):
"""
:param SourceNetworkInterfaceId: 当内网IP绑定的弹性网卡实例ID,例如:eni-m6dyj72l。
:type SourceNetworkInterfaceId: str
:param DestinationNetworkInterfaceId: 待迁移的目的弹性网卡实例ID。
:type DestinationNetworkInterfaceId: str
:param PrivateIpAddress: 迁移的内网IP地址,例如:10.0.0.6。
:type PrivateIpAddress: str
"""
self.SourceNetworkInterfaceId = None
self.DestinationNetworkInterfaceId = None
self.PrivateIpAddress = None
def _deserialize(self, params):
self.SourceNetworkInterfaceId = params.get("SourceNetworkInterfaceId")
self.DestinationNetworkInterfaceId = params.get("DestinationNetworkInterfaceId")
self.PrivateIpAddress = params.get("PrivateIpAddress")
class MigratePrivateIpAddressResponse(AbstractModel):
"""MigratePrivateIpAddress返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyAddressAttributeRequest(AbstractModel):
"""ModifyAddressAttribute请求参数结构体
"""
def __init__(self):
"""
:param AddressId: 标识 EIP 的唯一 ID。EIP 唯一 ID 形如:`eip-11112222`。
:type AddressId: str
:param AddressName: 修改后的 EIP 名称。长度上限为20个字符。
:type AddressName: str
"""
self.AddressId = None
self.AddressName = None
def _deserialize(self, params):
self.AddressId = params.get("AddressId")
self.AddressName = params.get("AddressName")
class ModifyAddressAttributeResponse(AbstractModel):
"""ModifyAddressAttribute返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyAddressTemplateAttributeRequest(AbstractModel):
"""ModifyAddressTemplateAttribute请求参数结构体
"""
def __init__(self):
"""
:param AddressTemplateId: IP地址模板实例ID,例如:ipm-mdunqeb6。
:type AddressTemplateId: str
:param AddressTemplateName: IP地址模板名称。
:type AddressTemplateName: str
:param Addresses: 地址信息,支持 IP、CIDR、IP 范围。
:type Addresses: list of str
"""
self.AddressTemplateId = None
self.AddressTemplateName = None
self.Addresses = None
def _deserialize(self, params):
self.AddressTemplateId = params.get("AddressTemplateId")
self.AddressTemplateName = params.get("AddressTemplateName")
self.Addresses = params.get("Addresses")
class ModifyAddressTemplateAttributeResponse(AbstractModel):
"""ModifyAddressTemplateAttribute返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyAddressTemplateGroupAttributeRequest(AbstractModel):
"""ModifyAddressTemplateGroupAttribute请求参数结构体
"""
def __init__(self):
"""
:param AddressTemplateGroupId: IP地址模板集合实例ID,例如:ipmg-2uw6ujo6。
:type AddressTemplateGroupId: str
:param AddressTemplateGroupName: IP地址模板集合名称。
:type AddressTemplateGroupName: str
:param AddressTemplateIds: IP地址模板实例ID, 例如:ipm-mdunqeb6。
:type AddressTemplateIds: list of str
"""
self.AddressTemplateGroupId = None
self.AddressTemplateGroupName = None
self.AddressTemplateIds = None
def _deserialize(self, params):
self.AddressTemplateGroupId = params.get("AddressTemplateGroupId")
self.AddressTemplateGroupName = params.get("AddressTemplateGroupName")
self.AddressTemplateIds = params.get("AddressTemplateIds")
class ModifyAddressTemplateGroupAttributeResponse(AbstractModel):
"""ModifyAddressTemplateGroupAttribute返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyCustomerGatewayAttributeRequest(AbstractModel):
"""ModifyCustomerGatewayAttribute请求参数结构体
"""
def __init__(self):
"""
:param CustomerGatewayId: 对端网关ID,例如:cgw-2wqq41m9,可通过DescribeCustomerGateways接口查询对端网关。
:type CustomerGatewayId: str
:param CustomerGatewayName: 对端网关名称,可任意命名,但不得超过60个字符。
:type CustomerGatewayName: str
"""
self.CustomerGatewayId = None
self.CustomerGatewayName = None
def _deserialize(self, params):
self.CustomerGatewayId = params.get("CustomerGatewayId")
self.CustomerGatewayName = params.get("CustomerGatewayName")
class ModifyCustomerGatewayAttributeResponse(AbstractModel):
"""ModifyCustomerGatewayAttribute返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyNetworkInterfaceAttributeRequest(AbstractModel):
"""ModifyNetworkInterfaceAttribute请求参数结构体
"""
def __init__(self):
"""
:param NetworkInterfaceId: 弹性网卡实例ID,例如:eni-pxir56ns。
:type NetworkInterfaceId: str
:param NetworkInterfaceName: 弹性网卡名称,最大长度不能超过60个字节。
:type NetworkInterfaceName: str
:param NetworkInterfaceDescription: 弹性网卡描述,可任意命名,但不得超过60个字符。
:type NetworkInterfaceDescription: str
:param SecurityGroupIds: 指定绑定的安全组,例如:['sg-1dd51d']。
:type SecurityGroupIds: list of str
"""
self.NetworkInterfaceId = None
self.NetworkInterfaceName = None
self.NetworkInterfaceDescription = None
self.SecurityGroupIds = None
def _deserialize(self, params):
self.NetworkInterfaceId = params.get("NetworkInterfaceId")
self.NetworkInterfaceName = params.get("NetworkInterfaceName")
self.NetworkInterfaceDescription = params.get("NetworkInterfaceDescription")
self.SecurityGroupIds = params.get("SecurityGroupIds")
class ModifyNetworkInterfaceAttributeResponse(AbstractModel):
"""ModifyNetworkInterfaceAttribute返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyPrivateIpAddressesAttributeRequest(AbstractModel):
"""ModifyPrivateIpAddressesAttribute请求参数结构体
"""
def __init__(self):
"""
:param NetworkInterfaceId: 弹性网卡实例ID,例如:eni-m6dyj72l。
:type NetworkInterfaceId: str
:param PrivateIpAddresses: 指定的内网IP信息。
:type PrivateIpAddresses: list of PrivateIpAddressSpecification
"""
self.NetworkInterfaceId = None
self.PrivateIpAddresses = None
def _deserialize(self, params):
self.NetworkInterfaceId = params.get("NetworkInterfaceId")
if params.get("PrivateIpAddresses") is not None:
self.PrivateIpAddresses = []
for item in params.get("PrivateIpAddresses"):
obj = PrivateIpAddressSpecification()
obj._deserialize(item)
self.PrivateIpAddresses.append(obj)
class ModifyPrivateIpAddressesAttributeResponse(AbstractModel):
"""ModifyPrivateIpAddressesAttribute返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyRouteTableAttributeRequest(AbstractModel):
"""ModifyRouteTableAttribute请求参数结构体
"""
def __init__(self):
"""
:param RouteTableId: 路由表实例ID,例如:rtb-azd4dt1c。
:type RouteTableId: str
:param RouteTableName: 路由表名称。
:type RouteTableName: str
"""
self.RouteTableId = None
self.RouteTableName = None
def _deserialize(self, params):
self.RouteTableId = params.get("RouteTableId")
self.RouteTableName = params.get("RouteTableName")
class ModifyRouteTableAttributeResponse(AbstractModel):
"""ModifyRouteTableAttribute返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifySecurityGroupAttributeRequest(AbstractModel):
"""ModifySecurityGroupAttribute请求参数结构体
"""
def __init__(self):
"""
:param SecurityGroupId: 安全组实例ID,例如sg-33ocnj9n,可通过DescribeSecurityGroups获取。
:type SecurityGroupId: str
:param GroupName: 安全组名称,可任意命名,但不得超过60个字符。
:type GroupName: str
:param GroupDescription: 安全组备注,最多100个字符。
:type GroupDescription: str
"""
self.SecurityGroupId = None
self.GroupName = None
self.GroupDescription = None
def _deserialize(self, params):
self.SecurityGroupId = params.get("SecurityGroupId")
self.GroupName = params.get("GroupName")
self.GroupDescription = params.get("GroupDescription")
class ModifySecurityGroupAttributeResponse(AbstractModel):
"""ModifySecurityGroupAttribute返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifySecurityGroupPoliciesRequest(AbstractModel):
"""ModifySecurityGroupPolicies请求参数结构体
"""
def __init__(self):
"""
:param SecurityGroupId: 安全组实例ID,例如sg-33ocnj9n,可通过DescribeSecurityGroups获取。
:type SecurityGroupId: str
:param SecurityGroupPolicySet: 安全组规则集合。 SecurityGroupPolicySet对象必须同时指定新的出(Egress)入(Ingress)站规则。 SecurityGroupPolicy对象不支持自定义索引(PolicyIndex)。
:type SecurityGroupPolicySet: :class:`tencentcloud.vpc.v20170312.models.SecurityGroupPolicySet`
"""
self.SecurityGroupId = None
self.SecurityGroupPolicySet = None
def _deserialize(self, params):
self.SecurityGroupId = params.get("SecurityGroupId")
if params.get("SecurityGroupPolicySet") is not None:
self.SecurityGroupPolicySet = SecurityGroupPolicySet()
self.SecurityGroupPolicySet._deserialize(params.get("SecurityGroupPolicySet"))
class ModifySecurityGroupPoliciesResponse(AbstractModel):
"""ModifySecurityGroupPolicies返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyServiceTemplateAttributeRequest(AbstractModel):
"""ModifyServiceTemplateAttribute请求参数结构体
"""
def __init__(self):
"""
:param ServiceTemplateId: 协议端口模板实例ID,例如:ppm-529nwwj8。
:type ServiceTemplateId: str
:param ServiceTemplateName: 协议端口模板名称。
:type ServiceTemplateName: str
:param Services: 支持单个端口、多个端口、连续端口及所有端口,协议支持:TCP、UDP、ICMP、GRE 协议。
:type Services: list of str
"""
self.ServiceTemplateId = None
self.ServiceTemplateName = None
self.Services = None
def _deserialize(self, params):
self.ServiceTemplateId = params.get("ServiceTemplateId")
self.ServiceTemplateName = params.get("ServiceTemplateName")
self.Services = params.get("Services")
class ModifyServiceTemplateAttributeResponse(AbstractModel):
"""ModifyServiceTemplateAttribute返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyServiceTemplateGroupAttributeRequest(AbstractModel):
"""ModifyServiceTemplateGroupAttribute请求参数结构体
"""
def __init__(self):
"""
:param ServiceTemplateGroupId: 协议端口模板集合实例ID,例如:ppmg-ei8hfd9a。
:type ServiceTemplateGroupId: str
:param ServiceTemplateGroupName: 协议端口模板集合名称。
:type ServiceTemplateGroupName: str
:param ServiceTemplateIds: 协议端口模板实例ID,例如:ppm-4dw6agho。
:type ServiceTemplateIds: list of str
"""
self.ServiceTemplateGroupId = None
self.ServiceTemplateGroupName = None
self.ServiceTemplateIds = None
def _deserialize(self, params):
self.ServiceTemplateGroupId = params.get("ServiceTemplateGroupId")
self.ServiceTemplateGroupName = params.get("ServiceTemplateGroupName")
self.ServiceTemplateIds = params.get("ServiceTemplateIds")
class ModifyServiceTemplateGroupAttributeResponse(AbstractModel):
"""ModifyServiceTemplateGroupAttribute返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifySubnetAttributeRequest(AbstractModel):
"""ModifySubnetAttribute请求参数结构体
"""
def __init__(self):
"""
:param SubnetId: 子网实例ID。形如:subnet-pxir56ns。
:type SubnetId: str
:param SubnetName: 子网名称,最大长度不能超过60个字节。
:type SubnetName: str
:param EnableBroadcast: 子网是否开启广播。
:type EnableBroadcast: str
"""
self.SubnetId = None
self.SubnetName = None
self.EnableBroadcast = None
def _deserialize(self, params):
self.SubnetId = params.get("SubnetId")
self.SubnetName = params.get("SubnetName")
self.EnableBroadcast = params.get("EnableBroadcast")
class ModifySubnetAttributeResponse(AbstractModel):
"""ModifySubnetAttribute返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyVpcAttributeRequest(AbstractModel):
"""ModifyVpcAttribute请求参数结构体
"""
def __init__(self):
"""
:param VpcId: VPC实例ID。形如:vpc-f49l6u0z。每次请求的实例的上限为100。参数不支持同时指定VpcIds和Filters。
:type VpcId: str
:param VpcName: 私有网络名称,可任意命名,但不得超过60个字符。
:type VpcName: str
:param EnableMulticast: 是否开启组播。true: 开启, false: 关闭。
:type EnableMulticast: str
:param DnsServers: DNS地址,最多支持4个,第1个默认为主,其余为备
:type DnsServers: list of str
:param DomainName: 域名
:type DomainName: str
"""
self.VpcId = None
self.VpcName = None
self.EnableMulticast = None
self.DnsServers = None
self.DomainName = None
def _deserialize(self, params):
self.VpcId = params.get("VpcId")
self.VpcName = params.get("VpcName")
self.EnableMulticast = params.get("EnableMulticast")
self.DnsServers = params.get("DnsServers")
self.DomainName = params.get("DomainName")
class ModifyVpcAttributeResponse(AbstractModel):
"""ModifyVpcAttribute返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyVpnConnectionAttributeRequest(AbstractModel):
"""ModifyVpnConnectionAttribute请求参数结构体
"""
def __init__(self):
"""
:param VpnConnectionId: VPN通道实例ID。形如:vpnx-f49l6u0z。
:type VpnConnectionId: str
:param VpnConnectionName: VPN通道名称,可任意命名,但不得超过60个字符。
:type VpnConnectionName: str
:param PreShareKey: 预共享密钥。
:type PreShareKey: str
:param SecurityPolicyDatabases: SPD策略组,例如:{"10.0.0.5/24":["172.16.31.10/16"]},10.0.0.5/24是vpc内网段172.16.31.10/16是IDC网段。用户指定VPC内哪些网段可以和您IDC中哪些网段通信。
:type SecurityPolicyDatabases: list of SecurityPolicyDatabase
:param IKEOptionsSpecification: IKE配置(Internet Key Exchange,因特网密钥交换),IKE具有一套自保护机制,用户配置网络安全协议。
:type IKEOptionsSpecification: :class:`tencentcloud.vpc.v20170312.models.IKEOptionsSpecification`
:param IPSECOptionsSpecification: IPSec配置,腾讯云提供IPSec安全会话设置。
:type IPSECOptionsSpecification: :class:`tencentcloud.vpc.v20170312.models.IPSECOptionsSpecification`
"""
self.VpnConnectionId = None
self.VpnConnectionName = None
self.PreShareKey = None
self.SecurityPolicyDatabases = None
self.IKEOptionsSpecification = None
self.IPSECOptionsSpecification = None
def _deserialize(self, params):
self.VpnConnectionId = params.get("VpnConnectionId")
self.VpnConnectionName = params.get("VpnConnectionName")
self.PreShareKey = params.get("PreShareKey")
if params.get("SecurityPolicyDatabases") is not None:
self.SecurityPolicyDatabases = []
for item in params.get("SecurityPolicyDatabases"):
obj = SecurityPolicyDatabase()
obj._deserialize(item)
self.SecurityPolicyDatabases.append(obj)
if params.get("IKEOptionsSpecification") is not None:
self.IKEOptionsSpecification = IKEOptionsSpecification()
self.IKEOptionsSpecification._deserialize(params.get("IKEOptionsSpecification"))
if params.get("IPSECOptionsSpecification") is not None:
self.IPSECOptionsSpecification = IPSECOptionsSpecification()
self.IPSECOptionsSpecification._deserialize(params.get("IPSECOptionsSpecification"))
class ModifyVpnConnectionAttributeResponse(AbstractModel):
"""ModifyVpnConnectionAttribute返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyVpnGatewayAttributeRequest(AbstractModel):
"""ModifyVpnGatewayAttribute请求参数结构体
"""
def __init__(self):
"""
:param VpnGatewayId: VPN网关实例ID。
:type VpnGatewayId: str
:param VpnGatewayName: VPN网关名称,最大长度不能超过60个字节。
:type VpnGatewayName: str
:param InstanceChargeType: VPN网关计费模式,目前只支持预付费(即包年包月)到后付费(即按量计费)的转换。即参数只支持:POSTPAID_BY_HOUR。
:type InstanceChargeType: str
"""
self.VpnGatewayId = None
self.VpnGatewayName = None
self.InstanceChargeType = None
def _deserialize(self, params):
self.VpnGatewayId = params.get("VpnGatewayId")
self.VpnGatewayName = params.get("VpnGatewayName")
self.InstanceChargeType = params.get("InstanceChargeType")
class ModifyVpnGatewayAttributeResponse(AbstractModel):
"""ModifyVpnGatewayAttribute返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class NetworkInterface(AbstractModel):
"""弹性网卡
"""
def __init__(self):
"""
:param NetworkInterfaceId: 弹性网卡实例ID,例如:eni-f1xjkw1b。
:type NetworkInterfaceId: str
:param NetworkInterfaceName: 弹性网卡名称。
:type NetworkInterfaceName: str
:param NetworkInterfaceDescription: 弹性网卡描述。
:type NetworkInterfaceDescription: str
:param SubnetId: 子网实例ID。
:type SubnetId: str
:param VpcId: VPC实例ID。
:type VpcId: str
:param GroupSet: 绑定的安全组。
:type GroupSet: list of str
:param Primary: 是否是主网卡。
:type Primary: bool
:param MacAddress: MAC地址。
:type MacAddress: str
:param State: 取值范围:PENDING|AVAILABLE|ATTACHING|DETACHING|DELETING。
:type State: str
:param PrivateIpAddressSet: 内网IP信息。
:type PrivateIpAddressSet: list of PrivateIpAddressSpecification
:param Attachment: 绑定的云服务器对象。
:type Attachment: :class:`tencentcloud.vpc.v20170312.models.NetworkInterfaceAttachment`
:param Zone: 可用区。
:type Zone: str
:param CreatedTime: 创建时间。
:type CreatedTime: str
"""
self.NetworkInterfaceId = None
self.NetworkInterfaceName = None
self.NetworkInterfaceDescription = None
self.SubnetId = None
self.VpcId = None
self.GroupSet = None
self.Primary = None
self.MacAddress = None
self.State = None
self.PrivateIpAddressSet = None
self.Attachment = None
self.Zone = None
self.CreatedTime = None
def _deserialize(self, params):
self.NetworkInterfaceId = params.get("NetworkInterfaceId")
self.NetworkInterfaceName = params.get("NetworkInterfaceName")
self.NetworkInterfaceDescription = params.get("NetworkInterfaceDescription")
self.SubnetId = params.get("SubnetId")
self.VpcId = params.get("VpcId")
self.GroupSet = params.get("GroupSet")
self.Primary = params.get("Primary")
self.MacAddress = params.get("MacAddress")
self.State = params.get("State")
if params.get("PrivateIpAddressSet") is not None:
self.PrivateIpAddressSet = []
for item in params.get("PrivateIpAddressSet"):
obj = PrivateIpAddressSpecification()
obj._deserialize(item)
self.PrivateIpAddressSet.append(obj)
if params.get("Attachment") is not None:
self.Attachment = NetworkInterfaceAttachment()
self.Attachment._deserialize(params.get("Attachment"))
self.Zone = params.get("Zone")
self.CreatedTime = params.get("CreatedTime")
class NetworkInterfaceAttachment(AbstractModel):
"""弹性网卡绑定关系
"""
def __init__(self):
"""
:param InstanceId: 云主机实例ID。
:type InstanceId: str
:param DeviceIndex: 网卡在云主机实例内的序号。
:type DeviceIndex: int
:param InstanceAccountId: 云主机所有者账户信息。
:type InstanceAccountId: str
:param AttachTime: 绑定时间。
:type AttachTime: str
"""
self.InstanceId = None
self.DeviceIndex = None
self.InstanceAccountId = None
self.AttachTime = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.DeviceIndex = params.get("DeviceIndex")
self.InstanceAccountId = params.get("InstanceAccountId")
self.AttachTime = params.get("AttachTime")
class Price(AbstractModel):
"""价格
"""
def __init__(self):
"""
:param InstancePrice: 实例价格。
:type InstancePrice: :class:`tencentcloud.vpc.v20170312.models.ItemPrice`
:param BandwidthPrice: 网络价格。
:type BandwidthPrice: :class:`tencentcloud.vpc.v20170312.models.ItemPrice`
"""
self.InstancePrice = None
self.BandwidthPrice = None
def _deserialize(self, params):
if params.get("InstancePrice") is not None:
self.InstancePrice = ItemPrice()
self.InstancePrice._deserialize(params.get("InstancePrice"))
if params.get("BandwidthPrice") is not None:
self.BandwidthPrice = ItemPrice()
self.BandwidthPrice._deserialize(params.get("BandwidthPrice"))
class PrivateIpAddressSpecification(AbstractModel):
"""内网IP信息
"""
def __init__(self):
"""
:param PrivateIpAddress: 内网IP地址。
:type PrivateIpAddress: str
:param Primary: 是否是主IP。
:type Primary: bool
:param PublicIpAddress: 公网IP地址。
:type PublicIpAddress: str
:param AddressId: EIP实例ID,例如:eip-11112222。
:type AddressId: str
:param Description: 内网IP描述信息。
:type Description: str
:param IsWanIpBlocked: 公网IP是否被封堵。
:type IsWanIpBlocked: bool
"""
self.PrivateIpAddress = None
self.Primary = None
self.PublicIpAddress = None
self.AddressId = None
self.Description = None
self.IsWanIpBlocked = None
def _deserialize(self, params):
self.PrivateIpAddress = params.get("PrivateIpAddress")
self.Primary = params.get("Primary")
self.PublicIpAddress = params.get("PublicIpAddress")
self.AddressId = params.get("AddressId")
self.Description = params.get("Description")
self.IsWanIpBlocked = params.get("IsWanIpBlocked")
class Quota(AbstractModel):
"""描述了配额信息
"""
def __init__(self):
"""
:param QuotaId: 配额名称,取值范围:<br><li>`TOTAL_EIP_QUOTA`:用户当前地域下EIP的配额数;<br><li>`DAILY_EIP_APPLY`:用户当前地域下今日申购次数;<br><li>`DAILY_PUBLIC_IP_ASSIGN`:用户当前地域下,重新分配公网 IP次数。
:type QuotaId: str
:param QuotaCurrent: 当前数量
:type QuotaCurrent: int
:param QuotaLimit: 配额数量
:type QuotaLimit: int
"""
self.QuotaId = None
self.QuotaCurrent = None
self.QuotaLimit = None
def _deserialize(self, params):
self.QuotaId = params.get("QuotaId")
self.QuotaCurrent = params.get("QuotaCurrent")
self.QuotaLimit = params.get("QuotaLimit")
class ReleaseAddressesRequest(AbstractModel):
"""ReleaseAddresses请求参数结构体
"""
def __init__(self):
"""
:param AddressIds: 标识 EIP 的唯一 ID 列表。EIP 唯一 ID 形如:`eip-11112222`。
:type AddressIds: list of str
"""
self.AddressIds = None
def _deserialize(self, params):
self.AddressIds = params.get("AddressIds")
class ReleaseAddressesResponse(AbstractModel):
"""ReleaseAddresses返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class RenewVpnGatewayRequest(AbstractModel):
"""RenewVpnGateway请求参数结构体
"""
def __init__(self):
"""
:param VpnGatewayId: VPN网关实例ID。
:type VpnGatewayId: str
:param InstanceChargePrepaid: 预付费计费模式。
:type InstanceChargePrepaid: :class:`tencentcloud.vpc.v20170312.models.InstanceChargePrepaid`
"""
self.VpnGatewayId = None
self.InstanceChargePrepaid = None
def _deserialize(self, params):
self.VpnGatewayId = params.get("VpnGatewayId")
if params.get("InstanceChargePrepaid") is not None:
self.InstanceChargePrepaid = InstanceChargePrepaid()
self.InstanceChargePrepaid._deserialize(params.get("InstanceChargePrepaid"))
class RenewVpnGatewayResponse(AbstractModel):
"""RenewVpnGateway返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ReplaceRouteTableAssociationRequest(AbstractModel):
"""ReplaceRouteTableAssociation请求参数结构体
"""
def __init__(self):
"""
:param SubnetId: 子网实例ID,例如:subnet-3x5lf5q0。可通过DescribeSubnets接口查询。
:type SubnetId: str
:param RouteTableId: 路由表实例ID,例如:rtb-azd4dt1c。
:type RouteTableId: str
"""
self.SubnetId = None
self.RouteTableId = None
def _deserialize(self, params):
self.SubnetId = params.get("SubnetId")
self.RouteTableId = params.get("RouteTableId")
class ReplaceRouteTableAssociationResponse(AbstractModel):
"""ReplaceRouteTableAssociation返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ReplaceRoutesRequest(AbstractModel):
"""ReplaceRoutes请求参数结构体
"""
def __init__(self):
"""
:param RouteTableId: 路由表实例ID,例如:rtb-azd4dt1c。
:type RouteTableId: str
:param Routes: 路由策略对象。只需要指定路由策略ID(RouteId)。
:type Routes: list of Route
"""
self.RouteTableId = None
self.Routes = None
def _deserialize(self, params):
self.RouteTableId = params.get("RouteTableId")
if params.get("Routes") is not None:
self.Routes = []
for item in params.get("Routes"):
obj = Route()
obj._deserialize(item)
self.Routes.append(obj)
class ReplaceRoutesResponse(AbstractModel):
"""ReplaceRoutes返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ReplaceSecurityGroupPolicyRequest(AbstractModel):
"""ReplaceSecurityGroupPolicy请求参数结构体
"""
def __init__(self):
"""
:param SecurityGroupId: 安全组实例ID,例如sg-33ocnj9n,可通过DescribeSecurityGroups获取。
:type SecurityGroupId: str
:param SecurityGroupPolicySet: 安全组规则集合对象。
:type SecurityGroupPolicySet: :class:`tencentcloud.vpc.v20170312.models.SecurityGroupPolicySet`
"""
self.SecurityGroupId = None
self.SecurityGroupPolicySet = None
def _deserialize(self, params):
self.SecurityGroupId = params.get("SecurityGroupId")
if params.get("SecurityGroupPolicySet") is not None:
self.SecurityGroupPolicySet = SecurityGroupPolicySet()
self.SecurityGroupPolicySet._deserialize(params.get("SecurityGroupPolicySet"))
class ReplaceSecurityGroupPolicyResponse(AbstractModel):
"""ReplaceSecurityGroupPolicy返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ResetRoutesRequest(AbstractModel):
"""ResetRoutes请求参数结构体
"""
def __init__(self):
"""
:param RouteTableId: 路由表实例ID,例如:rtb-azd4dt1c。
:type RouteTableId: str
:param RouteTableName: 路由表名称,最大长度不能超过60个字节。
:type RouteTableName: str
:param Routes: 路由策略。
:type Routes: list of Route
"""
self.RouteTableId = None
self.RouteTableName = None
self.Routes = None
def _deserialize(self, params):
self.RouteTableId = params.get("RouteTableId")
self.RouteTableName = params.get("RouteTableName")
if params.get("Routes") is not None:
self.Routes = []
for item in params.get("Routes"):
obj = Route()
obj._deserialize(item)
self.Routes.append(obj)
class ResetRoutesResponse(AbstractModel):
"""ResetRoutes返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ResetVpnConnectionRequest(AbstractModel):
"""ResetVpnConnection请求参数结构体
"""
def __init__(self):
"""
:param VpnGatewayId: VPN网关实例ID。
:type VpnGatewayId: str
:param VpnConnectionId: VPN通道实例ID。形如:vpnx-f49l6u0z。
:type VpnConnectionId: str
"""
self.VpnGatewayId = None
self.VpnConnectionId = None
def _deserialize(self, params):
self.VpnGatewayId = params.get("VpnGatewayId")
self.VpnConnectionId = params.get("VpnConnectionId")
class ResetVpnConnectionResponse(AbstractModel):
"""ResetVpnConnection返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ResetVpnGatewayInternetMaxBandwidthRequest(AbstractModel):
"""ResetVpnGatewayInternetMaxBandwidth请求参数结构体
"""
def __init__(self):
"""
:param VpnGatewayId: VPN网关实例ID。
:type VpnGatewayId: str
:param InternetMaxBandwidthOut: 公网带宽设置。可选带宽规格:5, 10, 20, 50, 100;单位:Mbps。
:type InternetMaxBandwidthOut: int
"""
self.VpnGatewayId = None
self.InternetMaxBandwidthOut = None
def _deserialize(self, params):
self.VpnGatewayId = params.get("VpnGatewayId")
self.InternetMaxBandwidthOut = params.get("InternetMaxBandwidthOut")
class ResetVpnGatewayInternetMaxBandwidthResponse(AbstractModel):
"""ResetVpnGatewayInternetMaxBandwidth返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class Route(AbstractModel):
"""路由策略对象
"""
def __init__(self):
"""
:param DestinationCidrBlock: 目的网段,取值不能在私有网络网段内,例如:192.168.127.12/24。
:type DestinationCidrBlock: str
:param GatewayType: 下一跳类型,目前我们支持的类型有:CVM:公网网关类型的云主机;VPN:vpn网关; DIRECTCONNECT:专线网关;PEERCONNECTION:对等连接;SSLVPN:sslvpn网关;NAT:nat网关; NORMAL_CVM:普通云主机。
:type GatewayType: str
:param GatewayId: 下一跳地址,这里只需要指定不同下一跳类型的网关ID,系统会自动匹配到下一跳地址。
:type GatewayId: str
:param RouteId: 路由策略ID。
:type RouteId: int
:param RouteDescription: 路由策略描述。
:type RouteDescription: str
:param Enabled: 是否启用
:type Enabled: bool
"""
self.DestinationCidrBlock = None
self.GatewayType = None
self.GatewayId = None
self.RouteId = None
self.RouteDescription = None
self.Enabled = None
def _deserialize(self, params):
self.DestinationCidrBlock = params.get("DestinationCidrBlock")
self.GatewayType = params.get("GatewayType")
self.GatewayId = params.get("GatewayId")
self.RouteId = params.get("RouteId")
self.RouteDescription = params.get("RouteDescription")
self.Enabled = params.get("Enabled")
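# Illustrative sketch (not part of the generated models): repointing an
# existing route at a NAT gateway via ReplaceRoutes, which matches routes by
# RouteId as the docstring above notes. The gateway ID is hypothetical.
def _example_replace_route(route_table_id, route_id):
    route = Route()
    route.RouteId = route_id                  # ReplaceRoutes matches on this
    route.DestinationCidrBlock = "0.0.0.0/0"  # must not overlap the VPC CIDR
    route.GatewayType = "NAT"
    route.GatewayId = "nat-abcd1234"          # hypothetical NAT gateway ID
    req = ReplaceRoutesRequest()
    req.RouteTableId = route_table_id
    req.Routes = [route]
    return req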
class RouteTable(AbstractModel):
"""路由表对象
"""
def __init__(self):
"""
:param VpcId: VPC实例ID。
:type VpcId: str
:param RouteTableId: 路由表实例ID,例如:rtb-azd4dt1c。
:type RouteTableId: str
:param RouteTableName: 路由表名称。
:type RouteTableName: str
:param AssociationSet: 路由表关联关系。
:type AssociationSet: list of RouteTableAssociation
:param RouteSet: 路由表策略集合。
:type RouteSet: list of Route
:param Main: 是否默认路由表。
:type Main: bool
:param CreatedTime: 创建时间。
:type CreatedTime: str
"""
self.VpcId = None
self.RouteTableId = None
self.RouteTableName = None
self.AssociationSet = None
self.RouteSet = None
self.Main = None
self.CreatedTime = None
def _deserialize(self, params):
self.VpcId = params.get("VpcId")
self.RouteTableId = params.get("RouteTableId")
self.RouteTableName = params.get("RouteTableName")
if params.get("AssociationSet") is not None:
self.AssociationSet = []
for item in params.get("AssociationSet"):
obj = RouteTableAssociation()
obj._deserialize(item)
self.AssociationSet.append(obj)
if params.get("RouteSet") is not None:
self.RouteSet = []
for item in params.get("RouteSet"):
obj = Route()
obj._deserialize(item)
self.RouteSet.append(obj)
self.Main = params.get("Main")
self.CreatedTime = params.get("CreatedTime")
class RouteTableAssociation(AbstractModel):
"""路由表关联关系
"""
def __init__(self):
"""
:param SubnetId: 子网实例ID。
:type SubnetId: str
:param RouteTableId: 路由表实例ID。
:type RouteTableId: str
"""
self.SubnetId = None
self.RouteTableId = None
def _deserialize(self, params):
self.SubnetId = params.get("SubnetId")
self.RouteTableId = params.get("RouteTableId")
class SecurityGroup(AbstractModel):
"""安全组对象
"""
def __init__(self):
"""
:param SecurityGroupId: 安全组实例ID,例如:sg-ohuuioma。
:type SecurityGroupId: str
:param SecurityGroupName: 安全组名称,可任意命名,但不得超过60个字符。
:type SecurityGroupName: str
:param SecurityGroupDesc: 安全组备注,最多100个字符。
:type SecurityGroupDesc: str
:param ProjectId: 项目id,默认0。可在qcloud控制台项目管理页面查询到。
:type ProjectId: str
:param IsDefault: 是否是默认安全组,默认安全组不支持删除。
:type IsDefault: bool
:param CreatedTime: 安全组创建时间。
:type CreatedTime: str
"""
self.SecurityGroupId = None
self.SecurityGroupName = None
self.SecurityGroupDesc = None
self.ProjectId = None
self.IsDefault = None
self.CreatedTime = None
def _deserialize(self, params):
self.SecurityGroupId = params.get("SecurityGroupId")
self.SecurityGroupName = params.get("SecurityGroupName")
self.SecurityGroupDesc = params.get("SecurityGroupDesc")
self.ProjectId = params.get("ProjectId")
self.IsDefault = params.get("IsDefault")
self.CreatedTime = params.get("CreatedTime")
class SecurityGroupAssociationStatistics(AbstractModel):
"""安全组关联的实例统计
"""
def __init__(self):
"""
:param SecurityGroupId: 安全组实例ID。
:type SecurityGroupId: str
:param CVM: 云主机实例数。
:type CVM: int
:param CDB: 数据库实例数。
:type CDB: int
:param ENI: 弹性网卡实例数。
:type ENI: int
:param SG: 被安全组引用数。
:type SG: int
:param CLB: 负载均衡实例数。
:type CLB: int
"""
self.SecurityGroupId = None
self.CVM = None
self.CDB = None
self.ENI = None
self.SG = None
self.CLB = None
def _deserialize(self, params):
self.SecurityGroupId = params.get("SecurityGroupId")
self.CVM = params.get("CVM")
self.CDB = params.get("CDB")
self.ENI = params.get("ENI")
self.SG = params.get("SG")
self.CLB = params.get("CLB")
class SecurityGroupPolicy(AbstractModel):
"""安全组规则对象
"""
def __init__(self):
"""
:param PolicyIndex: 安全组规则索引号。
:type PolicyIndex: int
:param Protocol: 协议, 取值: TCP,UDP, ICMP。
:type Protocol: str
:param Port: 端口(all, 离散port, range)。
:type Port: str
:param ServiceTemplate: 协议端口ID或者协议端口组ID。ServiceTemplate和Protocol+Port互斥。
:type ServiceTemplate: list of str
:param CidrBlock: 网段或IP(互斥)。
:type CidrBlock: str
:param SecurityGroupId: 已绑定安全组的网段或IP。
:type SecurityGroupId: str
:param AddressTemplate: IP地址ID或者ID地址组ID。
:type AddressTemplate: str
:param Action: ACCEPT 或 DROP。
:type Action: str
:param PolicyDescription: 安全组规则描述。
:type PolicyDescription: str
"""
self.PolicyIndex = None
self.Protocol = None
self.Port = None
self.ServiceTemplate = None
self.CidrBlock = None
self.SecurityGroupId = None
self.AddressTemplate = None
self.Action = None
self.PolicyDescription = None
def _deserialize(self, params):
self.PolicyIndex = params.get("PolicyIndex")
self.Protocol = params.get("Protocol")
self.Port = params.get("Port")
self.ServiceTemplate = params.get("ServiceTemplate")
self.CidrBlock = params.get("CidrBlock")
self.SecurityGroupId = params.get("SecurityGroupId")
self.AddressTemplate = params.get("AddressTemplate")
self.Action = params.get("Action")
self.PolicyDescription = params.get("PolicyDescription")
class SecurityGroupPolicySet(AbstractModel):
"""安全组规则集合
"""
def __init__(self):
"""
:param Version: 安全组规则当前版本。用户每次更新安全规则版本会自动加1,防止更新的路由规则已过期,不填不考虑冲突。
:type Version: str
:param Egress: 出站规则。
:type Egress: list of SecurityGroupPolicy
:param Ingress: 入站规则。
:type Ingress: list of SecurityGroupPolicy
"""
self.Version = None
self.Egress = None
self.Ingress = None
def _deserialize(self, params):
self.Version = params.get("Version")
if params.get("Egress") is not None:
self.Egress = []
for item in params.get("Egress"):
obj = SecurityGroupPolicy()
obj._deserialize(item)
self.Egress.append(obj)
if params.get("Ingress") is not None:
self.Ingress = []
for item in params.get("Ingress"):
obj = SecurityGroupPolicy()
obj._deserialize(item)
self.Ingress.append(obj)
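# Illustrative sketch (not part of the SDK): rebuilding a SecurityGroupPolicySet
# from a raw API response dict via _deserialize. The payload values below are
# hypothetical.
def _example_security_group_policy_set():
    raw = {
        "Version": "1",
        "Ingress": [{"PolicyIndex": 0, "Protocol": "TCP", "Port": "80",
                     "CidrBlock": "0.0.0.0/0", "Action": "ACCEPT",
                     "PolicyDescription": "allow http"}],
        "Egress": [],
    }
    policy_set = SecurityGroupPolicySet()
    policy_set._deserialize(raw)
    # Each Ingress entry is now a SecurityGroupPolicy instance
    return policy_set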
class SecurityPolicyDatabase(AbstractModel):
"""SecurityPolicyDatabase策略
"""
def __init__(self):
"""
:param LocalCidrBlock: 本端网段
:type LocalCidrBlock: str
:param RemoteCidrBlock: 对端网段
:type RemoteCidrBlock: list of str
"""
self.LocalCidrBlock = None
self.RemoteCidrBlock = None
def _deserialize(self, params):
self.LocalCidrBlock = params.get("LocalCidrBlock")
self.RemoteCidrBlock = params.get("RemoteCidrBlock")
class ServiceTemplate(AbstractModel):
"""协议端口模板
"""
def __init__(self):
"""
:param ServiceTemplateId: 协议端口实例ID,例如:ppm-f5n1f8da。
:type ServiceTemplateId: str
:param ServiceTemplateName: 模板名称。
:type ServiceTemplateName: str
:param ServiceSet: 协议端口信息。
:type ServiceSet: list of str
:param CreatedTime: 创建时间。
:type CreatedTime: str
"""
self.ServiceTemplateId = None
self.ServiceTemplateName = None
self.ServiceSet = None
self.CreatedTime = None
def _deserialize(self, params):
self.ServiceTemplateId = params.get("ServiceTemplateId")
self.ServiceTemplateName = params.get("ServiceTemplateName")
self.ServiceSet = params.get("ServiceSet")
self.CreatedTime = params.get("CreatedTime")
class ServiceTemplateGroup(AbstractModel):
"""协议端口模板集合
"""
def __init__(self):
"""
:param ServiceTemplateGroupId: 协议端口模板集合实例ID,例如:ppmg-2klmrefu。
:type ServiceTemplateGroupId: str
:param ServiceTemplateGroupName: 协议端口模板集合名称。
:type ServiceTemplateGroupName: str
:param ServiceTemplateIdSet: 协议端口模板实例ID。
:type ServiceTemplateIdSet: list of str
:param CreatedTime: 创建时间。
:type CreatedTime: str
"""
self.ServiceTemplateGroupId = None
self.ServiceTemplateGroupName = None
self.ServiceTemplateIdSet = None
self.CreatedTime = None
def _deserialize(self, params):
self.ServiceTemplateGroupId = params.get("ServiceTemplateGroupId")
self.ServiceTemplateGroupName = params.get("ServiceTemplateGroupName")
self.ServiceTemplateIdSet = params.get("ServiceTemplateIdSet")
self.CreatedTime = params.get("CreatedTime")
class Subnet(AbstractModel):
"""子网对象
"""
def __init__(self):
"""
:param VpcId: VPC实例ID。
:type VpcId: str
:param SubnetId: 子网实例ID,例如:subnet-bthucmmy。
:type SubnetId: str
:param SubnetName: 子网名称。
:type SubnetName: str
:param CidrBlock: 子网的CIDR。
:type CidrBlock: str
:param IsDefault: 是否默认子网。
:type IsDefault: bool
:param EnableBroadcast: 是否开启广播。
:type EnableBroadcast: bool
:param Zone: 可用区。
:type Zone: str
:param RouteTableId: 路由表实例ID,例如:rtb-l2h8d7c2。
:type RouteTableId: str
:param CreatedTime: 创建时间。
:type CreatedTime: str
:param AvailableIpAddressCount: 可用IP数。
:type AvailableIpAddressCount: int
"""
self.VpcId = None
self.SubnetId = None
self.SubnetName = None
self.CidrBlock = None
self.IsDefault = None
self.EnableBroadcast = None
self.Zone = None
self.RouteTableId = None
self.CreatedTime = None
self.AvailableIpAddressCount = None
def _deserialize(self, params):
self.VpcId = params.get("VpcId")
self.SubnetId = params.get("SubnetId")
self.SubnetName = params.get("SubnetName")
self.CidrBlock = params.get("CidrBlock")
self.IsDefault = params.get("IsDefault")
self.EnableBroadcast = params.get("EnableBroadcast")
self.Zone = params.get("Zone")
self.RouteTableId = params.get("RouteTableId")
self.CreatedTime = params.get("CreatedTime")
self.AvailableIpAddressCount = params.get("AvailableIpAddressCount")
class TransformAddressRequest(AbstractModel):
"""TransformAddress请求参数结构体
"""
def __init__(self):
"""
:param InstanceId: 待操作有普通公网 IP 的实例 ID。实例 ID 形如:`ins-11112222`。可通过登录[控制台](https://console.cloud.tencent.com/cvm)查询,也可通过 [DescribeInstances](https://cloud.tencent.com/document/api/213/9389) 接口返回值中的`InstanceId`获取。
:type InstanceId: str
"""
self.InstanceId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
class TransformAddressResponse(AbstractModel):
"""TransformAddress返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class UnassignPrivateIpAddressesRequest(AbstractModel):
"""UnassignPrivateIpAddresses请求参数结构体
"""
def __init__(self):
"""
:param NetworkInterfaceId: 弹性网卡实例ID,例如:eni-m6dyj72l。
:type NetworkInterfaceId: str
:param PrivateIpAddresses: 指定的内网IP信息。
:type PrivateIpAddresses: list of PrivateIpAddressSpecification
"""
self.NetworkInterfaceId = None
self.PrivateIpAddresses = None
def _deserialize(self, params):
self.NetworkInterfaceId = params.get("NetworkInterfaceId")
if params.get("PrivateIpAddresses") is not None:
self.PrivateIpAddresses = []
for item in params.get("PrivateIpAddresses"):
obj = PrivateIpAddressSpecification()
obj._deserialize(item)
self.PrivateIpAddresses.append(obj)
class UnassignPrivateIpAddressesResponse(AbstractModel):
"""UnassignPrivateIpAddresses返回参数结构体
"""
def __init__(self):
"""
:param RequestId: 唯一请求ID,每次请求都会返回。定位问题时需要提供该次请求的RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class Vpc(AbstractModel):
"""私有网络(VPC)对象。
"""
def __init__(self):
"""
:param VpcName: Vpc名称。
:type VpcName: str
:param VpcId: VPC实例ID,例如:vpc-azd4dt1c。
:type VpcId: str
:param CidrBlock: VPC的cidr,只能为10.0.0.0/16,172.16.0.0/12,192.168.0.0/16这三个内网网段内。
:type CidrBlock: str
:param IsDefault: 是否默认VPC。
:type IsDefault: bool
:param EnableMulticast: 是否开启组播。
:type EnableMulticast: bool
:param CreatedTime: 创建时间。
:type CreatedTime: str
:param DnsServerSet: DNS列表
:type DnsServerSet: list of str
:param DomainName: DHCP域名选项值
:type DomainName: str
:param DhcpOptionsId: DHCP选项集ID
:type DhcpOptionsId: str
"""
self.VpcName = None
self.VpcId = None
self.CidrBlock = None
self.IsDefault = None
self.EnableMulticast = None
self.CreatedTime = None
self.DnsServerSet = None
self.DomainName = None
self.DhcpOptionsId = None
def _deserialize(self, params):
self.VpcName = params.get("VpcName")
self.VpcId = params.get("VpcId")
self.CidrBlock = params.get("CidrBlock")
self.IsDefault = params.get("IsDefault")
self.EnableMulticast = params.get("EnableMulticast")
self.CreatedTime = params.get("CreatedTime")
self.DnsServerSet = params.get("DnsServerSet")
self.DomainName = params.get("DomainName")
self.DhcpOptionsId = params.get("DhcpOptionsId")
class VpnConnection(AbstractModel):
"""VPN通道对象。
"""
def __init__(self):
"""
:param VpnConnectionId: 通道实例ID。
:type VpnConnectionId: str
:param VpnConnectionName: 通道名称。
:type VpnConnectionName: str
:param VpcId: VPC实例ID。
:type VpcId: str
:param VpnGatewayId: VPN网关实例ID。
:type VpnGatewayId: str
:param CustomerGatewayId: 对端网关实例ID。
:type CustomerGatewayId: str
:param PreShareKey: 预共享密钥。
:type PreShareKey: str
:param VpnProto: 通道传输协议。
:type VpnProto: str
:param EncryptProto: 通道加密协议。
:type EncryptProto: str
:param RouteType: 路由类型。
:type RouteType: str
:param CreatedTime: 创建时间。
:type CreatedTime: str
:param State: 通道的生产状态,PENDING:生产中,AVAILABLE:运行中,DELETING:删除中。
:type State: str
:param NetStatus: 通道连接状态,AVAILABLE:已连接。
:type NetStatus: str
:param SecurityPolicyDatabaseSet: SPD。
:type SecurityPolicyDatabaseSet: list of SecurityPolicyDatabase
:param IKEOptionsSpecification: IKE选项。
:type IKEOptionsSpecification: :class:`tencentcloud.vpc.v20170312.models.IKEOptionsSpecification`
:param IPSECOptionsSpecification: IPSEC选择。
:type IPSECOptionsSpecification: :class:`tencentcloud.vpc.v20170312.models.IPSECOptionsSpecification`
"""
self.VpnConnectionId = None
self.VpnConnectionName = None
self.VpcId = None
self.VpnGatewayId = None
self.CustomerGatewayId = None
self.PreShareKey = None
self.VpnProto = None
self.EncryptProto = None
self.RouteType = None
self.CreatedTime = None
self.State = None
self.NetStatus = None
self.SecurityPolicyDatabaseSet = None
self.IKEOptionsSpecification = None
self.IPSECOptionsSpecification = None
def _deserialize(self, params):
self.VpnConnectionId = params.get("VpnConnectionId")
self.VpnConnectionName = params.get("VpnConnectionName")
self.VpcId = params.get("VpcId")
self.VpnGatewayId = params.get("VpnGatewayId")
self.CustomerGatewayId = params.get("CustomerGatewayId")
self.PreShareKey = params.get("PreShareKey")
self.VpnProto = params.get("VpnProto")
self.EncryptProto = params.get("EncryptProto")
self.RouteType = params.get("RouteType")
self.CreatedTime = params.get("CreatedTime")
self.State = params.get("State")
self.NetStatus = params.get("NetStatus")
if params.get("SecurityPolicyDatabaseSet") is not None:
self.SecurityPolicyDatabaseSet = []
for item in params.get("SecurityPolicyDatabaseSet"):
obj = SecurityPolicyDatabase()
obj._deserialize(item)
self.SecurityPolicyDatabaseSet.append(obj)
if params.get("IKEOptionsSpecification") is not None:
self.IKEOptionsSpecification = IKEOptionsSpecification()
self.IKEOptionsSpecification._deserialize(params.get("IKEOptionsSpecification"))
if params.get("IPSECOptionsSpecification") is not None:
self.IPSECOptionsSpecification = IPSECOptionsSpecification()
self.IPSECOptionsSpecification._deserialize(params.get("IPSECOptionsSpecification"))
class VpnGateway(AbstractModel):
"""VPN网关对象。
"""
def __init__(self):
"""
:param VpnGatewayId: 网关实例ID。
:type VpnGatewayId: str
:param VpcId: VPC实例ID。
:type VpcId: str
:param VpnGatewayName: 网关实例名称。
:type VpnGatewayName: str
:param Type: 网关实例类型:'IPSEC', 'SSL'。
:type Type: str
:param State: 网关实例状态, 'PENDING':生产中,'DELETING':删除中,'AVAILABLE':运行中。
:type State: str
:param PublicIpAddress: 网关公网IP。
:type PublicIpAddress: str
:param RenewFlag: 网关续费类型:'NOTIFY_AND_MANUAL_RENEW':手动续费,'NOTIFY_AND_AUTO_RENEW':自动续费
:type RenewFlag: str
:param InstanceChargeType: 网关付费类型:POSTPAID_BY_HOUR:按小时后付费,PREPAID:包年包月预付费,
:type InstanceChargeType: str
:param InternetMaxBandwidthOut: 网关出带宽。
:type InternetMaxBandwidthOut: int
:param CreatedTime: 创建时间。
:type CreatedTime: str
:param ExpiredTime: 预付费网关过期时间。
:type ExpiredTime: str
:param IsAddressBlocked: 公网IP是否被封堵。
:type IsAddressBlocked: bool
:param NewPurchasePlan: 计费模式变更,PREPAID_TO_POSTPAID:包年包月预付费到期转按小时后付费。
:type NewPurchasePlan: str
:param RestrictState: 网关计费装,PROTECTIVELY_ISOLATED:被安全隔离的实例,NORMAL:正常。
:type RestrictState: str
"""
self.VpnGatewayId = None
self.VpcId = None
self.VpnGatewayName = None
self.Type = None
self.State = None
self.PublicIpAddress = None
self.RenewFlag = None
self.InstanceChargeType = None
self.InternetMaxBandwidthOut = None
self.CreatedTime = None
self.ExpiredTime = None
self.IsAddressBlocked = None
self.NewPurchasePlan = None
self.RestrictState = None
def _deserialize(self, params):
self.VpnGatewayId = params.get("VpnGatewayId")
self.VpcId = params.get("VpcId")
self.VpnGatewayName = params.get("VpnGatewayName")
self.Type = params.get("Type")
self.State = params.get("State")
self.PublicIpAddress = params.get("PublicIpAddress")
self.RenewFlag = params.get("RenewFlag")
self.InstanceChargeType = params.get("InstanceChargeType")
self.InternetMaxBandwidthOut = params.get("InternetMaxBandwidthOut")
self.CreatedTime = params.get("CreatedTime")
self.ExpiredTime = params.get("ExpiredTime")
self.IsAddressBlocked = params.get("IsAddressBlocked")
self.NewPurchasePlan = params.get("NewPurchasePlan")
self.RestrictState = params.get("RestrictState") | 2.03125 | 2 |
mlcsim/dist.py | nobodywasishere/MLCSim | 0 | 6701 | #!/usr/bin/env python
"""Distribution functions
This module provides functions for dealing with normal distributions
and generating error maps.
When called directly as main, it allows for converting a threshold map
into an error map.
```
$ python -m mlcsim.dist --help
usage: dist.py [-h] [-b {1,2,3,4}] -f F [-o O]
options:
-h, --help show this help message and exit
-b {1,2,3,4} bits per cell
-f F Threshold map json to convert
-o O output to file
```
"""
import argparse
import json
from pprint import pprint
from typing import Dict, List
import numpy as np
from scipy import stats as ss # type: ignore
# https://stackoverflow.com/a/32574638/9047818
# https://stackoverflow.com/a/13072714/9047818
def normalMidpoint(mean_a: float, mean_b: float, std_a: float, std_b: float) -> float:
"""Find the midpoint between two normal distributions
Args:
mean_a (float): Mean of first distribution
mean_b (float): Mean of second distribution
std_a (float): Std dev of first distribution
std_b (float): Std dev of second distribution
Returns:
float: Midpoint between distributions
"""
a = 1 / (2 * std_a**2) - 1 / (2 * std_b**2)
b = mean_b / (std_b**2) - mean_a / (std_a**2)
c = (
mean_a**2 / (2 * std_a**2)
- mean_b**2 / (2 * std_b**2)
- np.log(std_b / std_a)
)
    roots = np.roots([a, b, c])
    masked = np.ma.masked_outside(roots, mean_a, mean_b)
    # compressed() keeps only the unmasked root(s), i.e. those between the means
    return float(masked.compressed()[0])
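# Derivation note: the midpoint above is the x at which the two normal pdfs
# are equal. Taking logs of N(x; mean_a, std_a) = N(x; mean_b, std_b) yields
# the quadratic a*x**2 + b*x + c = 0 with
#   a = 1/(2*std_a**2) - 1/(2*std_b**2)
#   b = mean_b/std_b**2 - mean_a/std_a**2
#   c = mean_a**2/(2*std_a**2) - mean_b**2/(2*std_b**2) - ln(std_b/std_a)
# which is exactly what np.roots solves above. When the std devs are equal,
# a == 0 and the crossing point reduces to the plain average
# (mean_a + mean_b) / 2.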
# https://www.askpython.com/python/normal-distribution
def normalChance(mean: float, stdev: float, thr: float) -> float:
"""Find the chance of a normal distribution above/below a given value
Args:
mean (float): Mean of the distribution
stdev (float): Std dev of the distribution
thr (float): Threshold to check above/below
Returns:
float: Chance for threshold to end up above/below the given point in the distribution
"""
chance = ss.norm(loc=mean, scale=stdev).cdf(thr)
return float(chance if mean > thr else 1 - chance)
def genErrorMap(thr_maps: Dict[str, List[List[float]]], bpc: int) -> List[List[float]]:
"""Generate an error map from a threshold map
Args:
thr_maps (dict): Threshold map
bpc (int): Bits per cell
Raises:
ValueError: if the given bpc is not in the threshold map
Returns:
list: Error map from the threshold map
"""
if str(bpc) not in thr_maps.keys():
        raise ValueError(f"Threshold map does not have values for {bpc} bits per cell")
thr_map: List[List[float]] = thr_maps[str(bpc)]
err_map = [[0.0]]
for i in range(len(thr_map) - 1):
mid = normalMidpoint(
thr_map[i][0], thr_map[i + 1][0], thr_map[i][1], thr_map[i + 1][1]
)
up = normalChance(thr_map[i][0], thr_map[i][1], mid)
dn = normalChance(thr_map[i + 1][0], thr_map[i + 1][1], mid)
err_map[i].append(up)
err_map.append([dn])
err_map[-1].append(0.0)
return err_map
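# Sketch of the threshold-map JSON consumed above (hypothetical values): keys
# are bits-per-cell as strings, values are one [mean, stdev] pair per level,
# e.g. for 1 bit per cell (two levels):
#
#     {"1": [[0.0, 0.1], [1.0, 0.1]]}
#
# Running `python -m mlcsim.dist -b 1 -f thresholds.json` would then print the
# resulting 2x2 error map, whose off-diagonal entries are the cross-over
# probabilities at the midpoint threshold.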
def _main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-b", type=int, default=2, choices=range(1, 5), help="bits per cell"
)
parser.add_argument("-f", required=True, help="Threshold map json to convert")
parser.add_argument("-o", type=str, help="output to file")
args = parser.parse_args()
with open(args.f) as f:
thr_map = json.load(f)
err_map = genErrorMap(thr_map, args.b)
if args.o:
with open(args.o, "w") as f:
json.dump(err_map, f)
else:
pprint(err_map)
if __name__ == "__main__":
_main()
| 3 | 3 |
Pr-Lab5/lab5.py | JackShen1/pr-labs | 2 | 6702 | earth = {
"Asia":
{'Japan': ("Tokyo", 377975, 125620000)},
"Europe":
{'Austria': ("Vienna", 83800, 8404000),
'Germany': ("Berlin", 357000, 81751000),
'Great Britain': ("London", 244800, 62700000),
'Iceland': ("Reykjavík", 103000, 317630),
'Italy': ("Rome", 301400, 60605000),
'Spain': ("Madrid", 506000, 46162000),
'Ukraine': ("Kyiv", 603700, 45562000)}
}
class Earth:
def __init__(self, continent):
self.dictionary = earth
self.continent = continent
def continent_out(self, a):
print(
" Country " + " " * 20 + " Capital " + " " * 15 + " Area (km²) " + " " * 7 + " Population " + "\n" +
"-----------" + " " * 20 + "-----------" + " " * 15 + "-------------------" + " " * 7 + "--------------")
for x in self.dictionary.get(a.title()):
print("{:30}".format(x),
"{:<30}{:<25}{:<25}".format(self.dictionary.get(a.title())[x][0],
str(self.dictionary.get(a.title())[x][1]) + " km²",
str(self.dictionary.get(a.title())[x][2])))
def country_out(self, a):
a.insert(0, ('Continent', ('Capital', 'Area (km²)', 'Population')))
b = []
for i in a:
b.extend((i[0], i[1][0], str(i[1][1]), str(i[1][2])))
return ("{:<20}{:<20}{:<25}{:<25}\n" * len(a)).format(*b)
def print_continent(self):
return self.continent_out(self.continent)
def print_country(self, a):
for i in self.dictionary.keys():
continent = i
country_describe = self.dictionary.get(continent).get(a.title())
if country_describe is None: continue
return self.country_out([(continent, country_describe)])
input_str = input("Enter the name of the continent or country: ")
if input_str.title() in earth.keys():
Earth(input_str).print_continent()
else:
print(Earth(continent=None).print_country(input_str))
| 3.796875 | 4 |
vue/repositories.bzl | ubiquitoustech/rules_vue | 0 | 6703 | """Declare runtime dependencies
These are needed for local dev, and users must install them as well.
See https://docs.bazel.build/versions/main/skylark/deploying.html#dependencies
"""
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
# WARNING: any changes in this function may be BREAKING CHANGES for users
# because we'll fetch a dependency which may be different from one that
# they were previously fetching later in their WORKSPACE setup, and now
# ours took precedence. Such breakages are challenging for users, so any
# changes in this function should be marked as BREAKING in the commit message
# and released only in semver majors.
def rules_vue_dependencies():
# The minimal version of bazel_skylib we require
maybe(
http_archive,
name = "bazel_skylib",
sha256 = "c6966ec828da198c5d9adbaa94c05e3a1c7f21bd012a0b29ba8ddbccb2c93b0d",
urls = [
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.1.1/bazel-skylib-1.1.1.tar.gz",
"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.1.1/bazel-skylib-1.1.1.tar.gz",
],
)
maybe(
http_archive,
name = "build_bazel_rules_nodejs",
sha256 = "4913ea835810c195df24d3a929315c29a64566cc48e409d8b0f35008b4e02e59",
urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/4.4.4/rules_nodejs-4.4.4.tar.gz"],
)
| 1.601563 | 2 |
endpoints/api/test/test_tag.py | kwestpharedhat/quay | 0 | 6704 | import pytest
from playhouse.test_utils import assert_query_count
from data.registry_model import registry_model
from data.database import Manifest
from endpoints.api.test.shared import conduct_api_call
from endpoints.test.shared import client_with_identity
from endpoints.api.tag import RepositoryTag, RestoreTag, ListRepositoryTags
from test.fixtures import *
@pytest.mark.parametrize(
"expiration_time, expected_status",
[
(None, 201),
("aksdjhasd", 400),
],
)
def test_change_tag_expiration_default(expiration_time, expected_status, client, app):
with client_with_identity("devtable", client) as cl:
params = {
"repository": "devtable/simple",
"tag": "latest",
}
request_body = {
"expiration": expiration_time,
}
conduct_api_call(cl, RepositoryTag, "put", params, request_body, expected_status)
def test_change_tag_expiration(client, app):
with client_with_identity("devtable", client) as cl:
params = {
"repository": "devtable/simple",
"tag": "latest",
}
repo_ref = registry_model.lookup_repository("devtable", "simple")
tag = registry_model.get_repo_tag(repo_ref, "latest")
updated_expiration = tag.lifetime_start_ts + 60 * 60 * 24
request_body = {
"expiration": updated_expiration,
}
conduct_api_call(cl, RepositoryTag, "put", params, request_body, 201)
tag = registry_model.get_repo_tag(repo_ref, "latest")
assert tag.lifetime_end_ts == updated_expiration
@pytest.mark.parametrize(
"manifest_exists,test_tag,expected_status",
[
(True, "-INVALID-TAG-NAME", 400),
(True, ".INVALID-TAG-NAME", 400),
(
True,
"INVALID-TAG_NAME-BECAUSE-THIS-IS-WAY-WAY-TOO-LOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOONG",
400,
),
(False, "newtag", 404),
(True, "generatemanifestfail", None),
(True, "latest", 201),
(True, "newtag", 201),
],
)
def test_move_tag(manifest_exists, test_tag, expected_status, client, app):
with client_with_identity("devtable", client) as cl:
test_image = "unknown"
if manifest_exists:
repo_ref = registry_model.lookup_repository("devtable", "simple")
tag_ref = registry_model.get_repo_tag(repo_ref, "latest")
assert tag_ref
test_image = tag_ref.manifest.digest
params = {"repository": "devtable/simple", "tag": test_tag}
request_body = {"manifest_digest": test_image}
if expected_status is None:
with pytest.raises(Exception):
conduct_api_call(cl, RepositoryTag, "put", params, request_body, expected_status)
else:
conduct_api_call(cl, RepositoryTag, "put", params, request_body, expected_status)
@pytest.mark.parametrize(
"repo_namespace, repo_name, query_count",
[
("devtable", "simple", 4),
("devtable", "history", 4),
("devtable", "complex", 4),
("devtable", "gargantuan", 4),
("buynlarge", "orgrepo", 6), # +2 for permissions checks.
("buynlarge", "anotherorgrepo", 6), # +2 for permissions checks.
],
)
def test_list_repo_tags(repo_namespace, repo_name, client, query_count, app):
# Pre-cache media type loads to ensure consistent query count.
Manifest.media_type.get_name(1)
params = {"repository": repo_namespace + "/" + repo_name}
with client_with_identity("devtable", client) as cl:
with assert_query_count(query_count):
tags = conduct_api_call(cl, ListRepositoryTags, "get", params).json["tags"]
repo_ref = registry_model.lookup_repository(repo_namespace, repo_name)
history, _ = registry_model.list_repository_tag_history(repo_ref)
assert len(tags) == len(history)
| 1.890625 | 2 |
inventory.py | Jongerr/vendor_receiving | 0 | 6705 | import json
import os
import random
import requests
from passlib.hash import pbkdf2_sha256 as pbk
from PyQt5.QtSql import QSqlDatabase, QSqlQuery
from pprint import pprint
ENCODING = 'utf-8'
DB_PATH = os.path.join(os.path.curdir, 'inventory.db')
def scrambleWord(word):
"""Randomize the letters in word and return the resulting string."""
word_list = list(word)
random.shuffle(word_list)
word = ''.join(word_list)
return word
def generateItems():
"""Generate a dictionary of retail products and store the data in items.json.
Pulls a list of items and artificially doubles it with scrambled item names.
Each item is given a random PLU, UPC, and department number.
Each dictionary key is the item's PLU.
"""
response = requests.get('https://www.randomlists.com/data/things.json')
json_data = response.json()
items = json_data['RandL']['items']
#double sample size by scrambling item names
scrambled_list = []
for item in items:
scrambled_item = scrambleWord(item)
scrambled_list.append(scrambled_item)
items = items + scrambled_list
data = {}
for item in items:
random.seed(item)
upc = random.randint(100000000000, 999999999999)
plu = random.randint(1000, 9999999)
department = (plu % 7) + 1
print('UPC:{0} | PLU:{1} | Item:{2} | D{3}'.format(upc, plu, item, department))
if plu in data:
print('Duplicate found: {}'.format(plu))
continue
data[plu] = {'upc':upc, 'department':department, 'model':item}
with open('items.json', 'w') as f:
json.dump(data, f)
def generatePO():
"""Create dumby Purchase Orders and store them in pos.json.
Each PO is asigned one random vendor and department number,
along with a random length list of items belonging to said department.
Returns: True if items.json successfully opens, False otherwise.
"""
try:
with open('items.json', 'r') as f:
items_dict = json.load(f)
except FileNotFoundError:
return False
vendors = ['Dyson', 'Ingrammicro', 'LKG', 'Inland', 'Sandisk', 'Seagate', 'Hasbro', 'Mattel',\
'Gear Head', 'Logitech', 'NTE', 'Dell', 'Microsoft', 'Right Stuff', 'Alliance', 'Energizer']
po_dict = {}
for i in range(50):
po_num = 24000000 + random.randint(1, 999999)
if po_num in po_dict:
continue
po_dict[po_num] = {'department': (po_num % 7) + 1, 'items': {}, 'vendor': random.choice(vendors)}
for key in items_dict:
match_found = False
loops = 0
while not match_found:
loops += 1
if loops > 200:
print('\n\nToo many loops.\n\n')
break
po, department = random.choice(list(po_dict.items()))
department = department['department']
print('PO department: {}'.format(department))
print('item plu: {} department: {}'.format(key, items_dict[key]['department']))
if items_dict[key]['department'] == department:
max_count = random.randint(1, 20)
po_dict[po]['items'][key] = max_count
match_found = True
with open('pos.json', 'w') as f:
json.dump(po_dict, f)
return True
def fillDB():
"""Create a database and populate two tables(named items and purchase_order).
The 'items' and 'purchase_order' tables are populated with the data from items.json
and pos.json respectively.
"""
with open('items.json') as f:
data = json.load(f)
db = QSqlDatabase.addDatabase('QSQLITE')
db.setDatabaseName(DB_PATH)
if not db.open():
print('DB could not be opened')
error = QSqlDatabase.lastError()
print(error.text())
return False
query = QSqlQuery()
if query.exec_("drop table items"):
print('successfully dropped table')
else:
print('unsuccessfully dropped table')
print(query.lastError().text())
if query.exec_("create table items(plu int primary key, upc varchar(12) unique, "
"model varchar(20), department int)"):
print('success')
else:
print('failure')
print(query.lastError().text())
for key in data:
if query.exec_("insert into items values({}, '{}', '{}', {})".format(key, data[key]['upc'],
data[key]['model'], data[key]['department'])):
print("values({}, {}, {}, {}) successfully inserted.".format(key, data[key]['upc'], data[key]['model'], data[key]['department']))
else:
print("values({}, {}, {}, {}) unsuccessfully inserted.".format(key, data[key]['upc'], data[key]['model'], data[key]['department']))
print(query.lastError().text())
with open('pos.json') as f:
po_dict = json.load(f)
if query.exec_("drop table purchase_order"):
print('successfully dropped table')
else:
print('unsuccessfully dropped table')
print(query.lastError().text())
if query.exec_("create table purchase_order(po int primary key, vendor varchar(30), "
"department int, items blob)"):
print('success')
else:
print('failure')
print(query.lastError().text())
for key in po_dict:
item_string = json.dumps(po_dict[key]['items'])
item_blob = item_string.encode(ENCODING)
if query.exec_("insert into purchase_order values({}, '{}', {}, '{}')"\
.format(key, po_dict[key]['vendor'], po_dict[key]['department'], item_string)):
print("values({}, {}, {}, {}) successfully inserted."\
.format(key, po_dict[key]['vendor'], po_dict[key]['department'], item_string))
else:
print("values({}, {}, {}, {}) unsuccessfully inserted."\
.format(key, po_dict[key]['vendor'], po_dict[key]['department'], item_blob))
print(query.lastError().text())
def createEmployeeTable():
db = QSqlDatabase.addDatabase('QSQLITE')
db.setDatabaseName(DB_PATH)
if not db.open():
print('DB could not be opened')
error = QSqlDatabase.lastError()
print(error.text())
return False
query = QSqlQuery()
if not query.exec_("drop table employee"):
print(query.lastError().text())
if not query.exec_("create table employee(id int primary key, first_name varchar(10), "\
"last_name varchar(10), posistion int, pass_hash varchar(200))"):
print(query.lastError().text())
if not query.exec_("insert into employee values({}, '{}', '{}', {}, '{}')".\
format(162973, 'Jon', 'Michie', 2, pbk.hash('Michie'))):
print(query.lastError().text())
query.exec_("insert into employee values({}, '{}', '{}', {}, '{}')".\
format(131901, 'Ben', 'Terry', 3, pbk.hash('Terry')))
query.exec_("insert into employee values({}, '{}', '{}', {}, '{}')".\
format(150697, 'Daniel', 'Silva', 2, pbk.hash('Silva')))
query.exec_("insert into employee values({}, '{}', '{}', {}, '{}')".\
format(68412, 'James', 'Hutchetson', 2, pbk.hash('Hutchetson')))
query.exec_("insert into employee values({}, '{}', '{}', {}, '{}')".\
format(161844, 'MacKenly', 'Gamble', 1, pbk.hash('Gamble')))
query.exec_("insert into employee values({}, '{}', '{}', {}, '{}')".\
format(141047, 'George', 'Huston', 1, pbk.hash('Huston')))
query.exec_("insert into employee values({}, '{}', '{}', {}, '{}')".\
format(46045, 'Arthur', 'Art', 1, pbk.hash('Art')))
def testHashVerification(name):
db = QSqlDatabase.addDatabase('QSQLITE')
db.setDatabaseName(DB_PATH)
if not db.open():
print('DB could not be opened')
error = QSqlDatabase.lastError()
print(error.text())
return False
query = QSqlQuery()
if not query.exec_("select pass_hash from employee where last_name = '{}'".format(name)):
print(query.lastError().text())
elif not query.next():
print('Table values not found')
else:
pass_hash = query.value(0)
if pbk.verify(name, pass_hash):
print('It\'s a match!')
else:
print('Match not found.')
if __name__ == '__main__':
generateItems()
generatePO()
fillDB()
createEmployeeTable()
testHashVerification('Terry')
| 3.15625 | 3 |
lnbits/core/views/lnurl.py | frennkie/lnbits | 0 | 6706 | import requests
from flask import abort, redirect, request, url_for
from lnurl import LnurlWithdrawResponse, handle as handle_lnurl
from lnurl.exceptions import LnurlException
from time import sleep
from lnbits.core import core_app
from lnbits.helpers import Status
from lnbits.settings import WALLET
from ..crud import create_account, get_user, create_wallet, create_payment
@core_app.route("/lnurlwallet")
def lnurlwallet():
memo = "LNbits LNURL funding"
try:
withdraw_res = handle_lnurl(request.args.get("lightning"), response_class=LnurlWithdrawResponse)
except LnurlException:
abort(Status.INTERNAL_SERVER_ERROR, "Could not process withdraw LNURL.")
try:
ok, checking_id, payment_request, error_message = WALLET.create_invoice(withdraw_res.max_sats, memo)
except Exception as e:
ok, error_message = False, str(e)
if not ok:
abort(Status.INTERNAL_SERVER_ERROR, error_message)
r = requests.get(
withdraw_res.callback.base,
params={**withdraw_res.callback.query_params, **{"k1": withdraw_res.k1, "pr": payment_request}},
)
if not r.ok:
abort(Status.INTERNAL_SERVER_ERROR, "Could not process withdraw LNURL.")
for i in range(10):
invoice_status = WALLET.get_invoice_status(checking_id)
sleep(i)
if not invoice_status.paid:
continue
break
user = get_user(create_account().id)
wallet = create_wallet(user_id=user.id)
create_payment(
wallet_id=wallet.id,
checking_id=checking_id,
amount=withdraw_res.max_sats * 1000,
memo=memo,
pending=invoice_status.pending,
)
return redirect(url_for("core.wallet", usr=user.id, wal=wallet.id))
| 2.28125 | 2 |
driver_training/driver_training.py | munishm/MLOpsPython | 0 | 6707 | # Import libraries
import argparse
from azureml.core import Run
import joblib
import json
import os
import pandas as pd
import shutil
# Import functions from train.py
from train import split_data, train_model, get_model_metrics
# Get the output folder for the model from the '--output_folder' parameter
parser = argparse.ArgumentParser()
parser.add_argument('--output_folder', type=str, dest='output_folder', default="outputs")
args = parser.parse_args()
print(args)
output_folder = args.output_folder
# Get the experiment run context
run = Run.get_context()
# load the safe driver prediction dataset
train_df = pd.read_csv('porto_seguro_safe_driver_prediction_input.csv')
# Load the parameters for training the model from the file
with open("parameters.json") as f:
pars = json.load(f)
parameters = pars["training"]
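# Sketch of the expected parameters.json shape (key names hypothetical); only
# the "training" section is read here, and each key/value pair is logged to
# the run below:
#
#     {"training": {"learning_rate": 0.02, "num_leaves": 31}}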
# Log each of the parameters to the run
for param_name, param_value in parameters.items():
run.log(param_name, param_value)
# Call the functions defined in this file
train_data, valid_data = split_data(train_df)
data = [train_data, valid_data]
model = train_model(data, parameters)
# Print the resulting metrics for the model
model_metrics = get_model_metrics(model, data)
print(model_metrics)
for k, v in model_metrics.items():
run.log(k, v)
# Save the trained model to the output folder
os.makedirs(output_folder, exist_ok=True)
output_path = output_folder + "/porto_seguro_safe_driver_model.pkl"
joblib.dump(value=model, filename=output_path)
run.complete()
| 2.828125 | 3 |
tests/__init__.py | madman-bob/python-lua-imports | 3 | 6708 | from lua_imports import lua_importer
lua_importer.register()
| 1.0625 | 1 |
app/models/product.py | VyachAp/SalesFABackend | 80 | 6709 | <filename>app/models/product.py
from sqlalchemy import Column, Integer, String, Float
from app.database.base_class import Base
class Product(Base):
id = Column(Integer, primary_key=True, index=True)
name = Column(String, nullable=False)
price = Column(Float, nullable=False)
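# Usage sketch (assumptions: a SQLAlchemy session factory such as the
# hypothetical "SessionLocal" exists elsewhere in the app):
#
#     product = Product(name="Espresso", price=2.50)
#     with SessionLocal() as session:
#         session.add(product)
#         session.commit()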
| 2.703125 | 3 |
katana/utils/directory_traversal_utils.py | warriorframework/Katanaframework | 1 | 6710 | <reponame>warriorframework/Katanaframework<gh_stars>1-10
import glob
import os
import re
import errno
import shutil
def get_sub_dirs_and_files(path, abs_path=False):
"""
Gets the direct child sub-files and sub-folders of the given directory
Args:
path: Absolute path to the directory
abs_path: If set to True, it returns a list of absolute paths to the sub-directories and
sub-files instead of directory names only
Returns:
dict: {"folders": [list of (if abs_path is True, then path to) sub-folders],
"files": [list of (if abs_path is True, then path to) sub-files]}
"""
folders = get_sub_folders(path, abs_path=abs_path)
files = get_sub_files(path, abs_path=abs_path)
return {"folders": folders, "files": files}
def get_sub_folders(path, abs_path=False):
"""
Gets the direct child sub-folders of the given directory
Args:
path: Absolute path to the directory
abs_path: If set to True, it returns a list of absolute paths to the sub-directories
instead of directory names only
Returns:
only_folders: [list of sub-folders]
"""
folders = []
temp = glob.glob(path + os.sep + "*")
for folder in temp:
if os.path.isdir(folder) and not folder.endswith('__pycache__'):
folders.append(folder)
only_folders = [f.replace("\\", '/') for f in folders]
if not abs_path:
only_folders = [f.rpartition('/')[2] for f in only_folders]
return only_folders
def get_sub_files(path, abs_path=False):
"""
Gets the direct child sub-files of the given directory
Args:
path: Absolute path to the directory
abs_path: If set to True, it returns a list of absolute paths to the sub-files instead of
file names only
Returns:
only_files: [list of sub-files]
"""
files = glob.glob(path + os.sep + "*.*")
only_files = [f.replace("\\", '/') for f in files]
if not abs_path:
only_files = [f.rpartition('/')[2] for f in only_files]
return only_files
def get_abs_path(relative_path, base_path=None, silence_error=False):
"""
Gets the absolute path from the given relative_path and base_path
Args:
relative_path: relative path to the file/directory
base_path: absolute path from where the relative path should be traced. If not provided, the
current working directory path will be used.
silence_error: Setting this to True would not verify if the directory exists
Returns:
path: absolute path derived from relative_path and base_path
"""
if base_path is None:
base_path = os.getcwd()
path = os.path.join(base_path.strip(), relative_path.strip())
if not silence_error and not os.path.exists(path):
print("An Error Occurred: {0} does not exist".format(path))
path = None
return path
def get_parent_directory(directory_path, level=1):
"""
Gets the parent directory
Args:
directory_path: Absolute path to the file/dir who's parent needs to be returned
level: Indicates how many levels up to go to find the parent
eg: default of 1 goes one level up (to the parent directory)
level=2 would get the grandparent directory
Returns:
"""
if directory_path.endswith(os.sep):
directory_path = directory_path[:-1]
for i in range(0, level):
directory_path = os.path.dirname(directory_path)
return directory_path
def get_paths_of_subfiles(parent_dir, extension=re.compile("\..*")):
"""
This function returns a list of all the sub-files inside the given directory
Args:
parent_dir: Absolute path to the directory
        extension: Regular Expression that would match a file extension. If not provided, file paths
                   of all extensions will be returned
    Returns:
        file_paths: A list of paths to sub-files inside the parent_dir
"""
file_paths = []
sub_files_and_folders = get_sub_dirs_and_files(parent_dir, abs_path=True)
for sub_file in sub_files_and_folders["files"]:
if extension.match(os.path.splitext(sub_file)[1]):
file_paths.append(sub_file)
for sub_folder in sub_files_and_folders["folders"]:
file_paths.extend(get_paths_of_subfiles(sub_folder, extension=extension))
return file_paths
def get_dir_from_path(path):
"""
This function is wrapper function for os.path.basename.
Args:
path: a file path [Eg: /home/user/Documents/GitHub/warriorframework]
Returns:
The base directory name: [Eg: warriorframework]
"""
return os.path.basename(path)
def get_parent_dir_path(path):
"""
This function is wrapper function for os.path.dirname(os.path.normpath(<path>)).
Args:
path: a file path [Eg: /home/user/Documents/GitHub/warriorframework]
Returns:
The parent directory path: [Eg: /home/user/Documents/GitHub]
"""
return os.path.dirname(os.path.normpath(path))
def join_path(path, *paths):
"""
This function is wrapper function for os.path.join.
Args:
path: a file path
*paths: paths to be joined to the file path above
Returns:
Joined path
"""
return os.path.join(path, *paths)
def get_relative_path(path, start_directory):
"""
This is a wrapper function for the os.path.relpath
Args:
        path: Absolute path to the file/dir to which the relative path needs to be calculated.
start_directory: The absolute path to the starting directory
Returns:
rel_path: A relative path from start_directory
"""
if start_directory == "":
print("-- Error -- start_directory is empty.")
relpath = path
else:
try:
relpath = os.path.relpath(path, start_directory)
except Exception as e:
print("-- Error -- {0}".format(e))
relpath = None
else:
if not relpath.startswith(".") and not relpath.startswith(os.sep):
relpath = os.sep + relpath
return relpath
def create_dir(path):
output = path
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
output = False
print("-- A Error Occurred -- {0}".format(exception))
return output
def delete_dir(src):
output = True
try:
shutil.rmtree(src)
except Exception as e:
print(e)
output = False
return output
def file_or_dir_exists(filepath):
output = False
if os.path.exists(filepath):
output = True
return output
def get_direct_sub_files(path, abs_path=False, extension=re.compile("\..*")):
"""
Gets the direct child sub-files of the given directory
Args:
path: Absolute path to the directory
abs_path: If set to True, it returns a list of absolute paths to the sub-files instead of
file names only
Returns:
only_files: [list of sub-files]
"""
files = glob.glob(path + os.sep + "*.*")
only_files = [f.replace("\\", '/') for f in files]
if not abs_path:
only_files = [f.rpartition('/')[2] for f in only_files]
final_files = []
for sub_file in only_files:
if extension.match(os.path.splitext(sub_file)[1]):
final_files.append(sub_file)
return final_files
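# Illustrative sketch (not part of the framework): combining the helpers above
# to list every Python file under a directory, expressed relative to it. The
# directory path is hypothetical.
def _example_list_python_files(parent_dir="/tmp/project"):
    py_files = get_paths_of_subfiles(parent_dir, extension=re.compile(r"\.py$"))
    return [get_relative_path(p, parent_dir) for p in py_files]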
| 3 | 3 |
alignment.py | LucaOnline/theanine-synthetase | 0 | 6711 | <gh_stars>0
"""The `alignment` module provides an implementation of the Needleman-Wunsch alignment algorithm."""
from typing import Tuple, Literal, List
from math import floor
import numpy as np
from stats import variance
MOVE_DIAGONAL = 0
MOVE_RIGHT = 1
MOVE_DOWN = 2
EditMove = Literal[MOVE_DIAGONAL, MOVE_RIGHT, MOVE_DOWN]
CHEMICAL_CLASS = {
"A": "Purine",
"G": "Purine",
"T": "Pyrimidine",
"C": "Pyrimidine",
}
class AlignmentResult:
"""
AlignmentResult represents the result of performing an alignment on two sequences.
"""
def __init__(self, alignment_1: str, alignment_2: str):
"""
Produces a new AlignmentResult representing the result of performing an alignment on
two sequences.
"""
if len(alignment_1) != len(alignment_2):
raise ValueError("input strings have differing lengths")
self.alignment_1 = alignment_1
self.alignment_2 = alignment_2
def get_alignment_length(self) -> int:
"""Returns the length of the alignment."""
return len(self.alignment_1)
def get_alignment_1(self) -> str:
"""Returns the first alignment string."""
return self.alignment_1
def get_alignment_2(self) -> str:
"""Returns the second alignment string."""
return self.alignment_2
def get_match_string(self) -> str:
"""Returns the match string for the alignment."""
return "".join(
[
"|" if self.alignment_1[i] == self.alignment_2[i] else " "
for i in range(len(self.alignment_1))
]
)
def clustered_mismatches(self, cluster_count: int) -> List[int]:
"""
Breaks the alignment into `cluster_count` clusters and
returns the number of mismatches in each cluster. If the
alignment cannot be equally divided into the number of
clusters, this leaves the last cluster with the remainder
of the mismatches.
"""
if cluster_count < 1:
raise ValueError("cluster count must be greater than or equal to 1")
match_string = self.get_match_string()
cluster_size = floor(len(match_string) / cluster_count)
        clusters = [
            match_string[i * cluster_size : (i + 1) * cluster_size].count(" ")
            for i in range(cluster_count)
        ]
        # Fold any leftover tail into the last cluster, per the docstring
        clusters[-1] += match_string[cluster_count * cluster_size :].count(" ")
        return clusters
def clustered_mismatch_variance(self, cluster_count: int) -> float:
"""
Returns the variance between the mismatch clusters. The
raw cluster mismatches can be retrieved with the
`clustered_mismatches` method. `cluster_count` controls
the number of clusters used.
"""
return variance(
np.array(self.clustered_mismatches(cluster_count=cluster_count)),
sample=False,
)
def matches(self) -> int:
"""Returns the number of matching elements for the alignment."""
return self.get_match_string().count("|")
def hamming_distance(self) -> int:
"""Returns the Hamming distance of the alignment."""
return len(self.alignment_1) - self.matches()
def largest_mismatch(self) -> Tuple[int, int]:
"""Returns the position and size of the largest mismatch in the alignment."""
matches = self.get_match_string()
found_mismatch = False
largest_mismatch = 0
largest_mismatch_pos = 0
current_mismatch = 0
for i, c in enumerate(matches):
if c == " ":
found_mismatch = True
current_mismatch += 1
if current_mismatch > largest_mismatch:
largest_mismatch = current_mismatch
largest_mismatch_pos = i - largest_mismatch + 1
else:
current_mismatch = 0
if found_mismatch:
return (largest_mismatch_pos, largest_mismatch)
return (-1, 0)
def format_result(self, line_length: int = 80):
"""
Formats the found alignment with pipes between
matching elements. The optional `line_length` parameter
allows for adjusting the number of elements on each set of
lines.
"""
matches = self.get_match_string()
# Chunk lines
alignment_1_lines = [
self.alignment_1[i : i + line_length]
for i in range(0, len(self.alignment_1), line_length)
]
alignment_2_lines = [
self.alignment_2[i : i + line_length]
for i in range(0, len(self.alignment_2), line_length)
]
match_lines = [
matches[i : i + line_length] for i in range(0, len(matches), line_length)
]
# Output line chunks in order
return "\n".join(
[
"\n".join(
[alignment_1_lines[i], match_lines[i], alignment_2_lines[i], ""]
)
for i in range(len(match_lines))
]
)
def examine(self, line_length: int = 80):
"""
Formats and prints the found alignment with pipes between
matching elements. The optional `line_length` parameter
allows for adjusting the number of elements on each set of
lines.
"""
print(self.format_result(line_length=line_length))
def backtrack(quad: np.ndarray) -> EditMove:
"""Trace one step back through an edit matrix."""
if quad.shape == (0, 2):
return MOVE_DOWN
elif quad.shape == (2, 0):
return MOVE_RIGHT
# numpy's argmax doesn't allow for prioritizing non-indels
next_pos = (0, 0)
if quad[0, 1] > quad[next_pos]:
next_pos = (0, 1)
if quad[1, 0] > quad[next_pos]:
next_pos = (1, 0)
if next_pos == (0, 0):
return MOVE_DIAGONAL
elif next_pos == (0, 1):
return MOVE_RIGHT
else:
return MOVE_DOWN
def score_cell(
quad: np.ndarray,
top_char: str,
left_char: str,
nucleotides: bool,
chemical_classes: dict,
) -> int:
"""Calculate the Needleman-Wunsch score for a cell."""
down_score = quad[0, 1] - 1
right_score = quad[1, 0] - 1
# Penalize transversions more heavily
if nucleotides and chemical_classes[top_char] != chemical_classes[left_char]:
down_score -= 1
right_score -= 1
diag_score = quad[0, 0] - 1
if top_char == left_char:
diag_score += 2
return max([down_score, right_score, diag_score])
def align_sequences(
top_seq: str, left_seq: str, nucleotides: bool = True
) -> AlignmentResult:
"""
This function aligns the two provided sequences using Needleman-Wunsch
alignment. It uses a scoring scheme with a gap penalty of -1, a match
bonus of 1, and a mismatch penalty of -1. If the two sequences are
`nucleotides`, then an additional -1 penalty is applied to transversions.
"""
size1 = len(top_seq) + 1
size2 = len(left_seq) + 1
chemical_classes = CHEMICAL_CLASS # Copy this into the local scope so it can be accessed more quickly
# Build search matrix
    search = np.zeros((size2, size1), dtype=int)  # np.int was removed in NumPy 1.24
search[0] = [i for i in range(0, -size1, -1)]
search[:, 0] = [i for i in range(0, -size2, -1)]
# Do scoring
for x in range(1, size2):
for y in range(1, size1):
search[x, y] = score_cell(
search[x - 1 : x + 1, y - 1 : y + 1],
top_seq[y - 1],
left_seq[x - 1],
nucleotides,
chemical_classes,
)
search = search.T
# Unwind result
final_top = ""
final_left = ""
bt_x, bt_y = (size1 - 1, size2 - 1)
while bt_x != 0 or bt_y != 0:
next_move = backtrack(search[bt_x - 1 : bt_x + 1, bt_y - 1 : bt_y + 1])
if next_move == MOVE_DIAGONAL:
final_top = top_seq[bt_x - 1] + final_top
final_left = left_seq[bt_y - 1] + final_left
bt_x -= 1
bt_y -= 1
elif next_move == MOVE_DOWN:
final_top = "-" + final_top
final_left = left_seq[bt_y - 1] + final_left
bt_y -= 1
elif next_move == MOVE_RIGHT:
final_top = top_seq[bt_x - 1] + final_top
final_left = "-" + final_left
bt_x -= 1
return AlignmentResult(final_top, final_left)
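if __name__ == "__main__":
    # Minimal sketch of the API above, using made-up sequences: align two
    # short nucleotide strings and inspect the match statistics.
    result = align_sequences("GATTACA", "GATCACA")
    result.examine()  # pretty-prints the alignment with pipes between matches
    print("matches:", result.matches())
    print("hamming distance:", result.hamming_distance())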
| 3.234375 | 3 |
examples/the-feeling-of-success/mock_grasp_object_op.py | yujialuo/erdos | 0 | 6712 | from mock_gripper_op import MockGripType
from std_msgs.msg import Bool
from erdos.op import Op
from erdos.data_stream import DataStream
from erdos.message import Message
class MockGraspObjectOperator(Op):
"""
Sends a "close" action to the gripper.
"""
gripper_stream = "gripper-output-stream"
action_complete_stream_name = "grasp-action-complete-stream"
def __init__(self, name):
"""
Initializes a lock which blocks future actions to be sent until the
past actions are completed.
"""
super(MockGraspObjectOperator, self).__init__(name)
self.move_ahead_lock = True
@staticmethod
def setup_streams(input_streams, trigger_stream_name, gripper_stream_name):
"""
Registers callbacks on the given streams and returns two streams, one
of which sends the action to the gripper and the other returns a
message upon the completion of the action.
"""
input_streams.filter_name(trigger_stream_name)\
.add_callback(MockGraspObjectOperator.grasp_object)
input_streams.filter_name(gripper_stream_name)\
.add_callback(MockGraspObjectOperator.release_lock)
return [
DataStream(
data_type=MockGripType,
name=MockGraspObjectOperator.gripper_stream),
DataStream(
data_type=Bool,
name=MockGraspObjectOperator.action_complete_stream_name)
]
def grasp_object(self, msg):
"""
Sends a close action to the gripper and waits for its completion.
"""
mock_grasp_object = MockGripType("close")
mock_grasp_msg = Message(mock_grasp_object, msg.timestamp)
self.move_ahead_lock = False
self.get_output_stream(
MockGraspObjectOperator.gripper_stream).send(mock_grasp_msg)
while not self.move_ahead_lock:
pass
action_complete_msg = Message(True, msg.timestamp)
self.get_output_stream(
MockGraspObjectOperator.action_complete_stream_name).send(
action_complete_msg)
def release_lock(self, msg):
"""
Releases the lock so that new actions can be sent to the gripper.
"""
self.move_ahead_lock = True
def execute(self):
self.spin()
| 2.59375 | 3 |
src/pyfsa/lib/fsa.py | taliamax/pyfsa | 1 | 6713 | <filename>src/pyfsa/lib/fsa.py
# -*- coding: utf-8 -*-
import pygraphviz as gv # type: ignore
import itertools as it
from typing import (
List,
Optional,
)
from pyfsa.lib.types import TransitionsTable
def get_state_graph(
transitions: TransitionsTable,
start: Optional[str] = None,
end: Optional[str] = None,
nodes: Optional[List[str]] = None,
name: str = 'output.png',
draw: bool = True,
engine: str = 'circo',
) -> gv.AGraph:
'''
From a transition dictionary, creates a pygraphviz graph
of all the possible states and how to reach the given state.
Returns the resulting graph.
'''
graph = gv.AGraph(directed=True, strict=False, ranksep='1')
key_num = it.count()
if nodes is not None:
graph.add_nodes_from(nodes)
else:
graph.add_nodes_from(transitions.keys())
for node, transition_row in transitions.items():
for label, targets in transition_row.items():
for target in targets:
graph.add_edge(
node,
target,
key=f'{next(key_num)}',
label=label,
weight=1,
)
if start:
n: gv.Node = graph.get_node(start)
n.attr['color'] = '#0000FF'
n.attr['style'] = 'filled'
if end:
n = graph.get_node(end)
n.attr['color'] = '#00FF00'
n.attr['style'] = 'filled'
if draw:
graph.layout(prog=engine)
graph.draw(name)
return graph
def verify_string(
string: str,
starting_state: str,
final_state: str,
transitions: TransitionsTable,
) -> bool:
'''
Given a transitions table, a start and end state, and
some string, verifies that executing the finite state machine
on the given string produces the desired final state.
'''
current_state = starting_state
for letter in string:
transition = transitions[current_state]
current_state = transition[letter][0]
return current_state == final_state
def render_string_graph(
string: str,
start: str,
end: str,
transitions: TransitionsTable,
name: str = 'output.png',
draw: bool = True,
engine: str = 'circo'
) -> gv.AGraph:
'''
    Given a string, a start state, an end state, and a
    transitions table, produces the graph resulting from
    the traversal of the string through the states defined
in the transitions table. By default, it will
output a png file of the result, but that can be
suppressed.
'''
graph = gv.AGraph(directed=True)
graph.graph_attr['label'] = f'Evaluating {string}'
node_names = it.count()
current_state = start
node_name = next(node_names)
graph.add_node(node_name)
current_node = gv.Node(graph, node_name)
current_node.attr['label'] = current_state
current_node.attr['fillcolor'] = '#0000FF'
current_node.attr['style'] = 'filled'
for letter in string:
node_name = next(node_names)
graph.add_node(node_name)
next_node = gv.Node(graph, node_name)
        # TODO: The algorithm takes only the first matching next state,
        # which may not produce a correct answer for nondeterministic
        # machines; this needs to be fixed.
next_state = transitions[current_state][letter][0]
next_node.attr['label'] = next_state
graph.add_edge(current_node, next_node, label=letter)
current_node = next_node
current_state = next_state
if current_state == end:
current_node.attr['style'] = 'filled'
current_node.attr['fillcolor'] = '#00FF00'
if draw:
graph.layout(prog=engine)
graph.draw(name)
return graph
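if __name__ == '__main__':
    # Quick sketch with a hypothetical two-state machine over {a, b}: 'a'
    # advances q0 -> q1, everything else stays put, so "ab" ends in q1.
    transitions = {
        'q0': {'a': ['q1'], 'b': ['q0']},
        'q1': {'a': ['q1'], 'b': ['q1']},
    }
    print(verify_string('ab', 'q0', 'q1', transitions))  # True
    # Rendering writes output.png and requires Graphviz to be installed:
    render_string_graph('ab', 'q0', 'q1', transitions, name='output.png')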
| 2.6875 | 3 |
examples/multimedia/mmimdb_MFM.py | kapikantzari/MultiBench | 148 | 6714 | import torch
import sys
import os
sys.path.append(os.getcwd())
from utils.helper_modules import Sequential2
from unimodals.common_models import Linear, MLP, MaxOut_MLP
from datasets.imdb.get_data import get_dataloader
from fusions.common_fusions import Concat
from objective_functions.objectives_for_supervised_learning import MFM_objective
from objective_functions.recon import sigmloss1d
from training_structures.Supervised_Learning import train, test
filename = "best_mfm.pt"
traindata, validdata, testdata = get_dataloader(
"../video/multimodal_imdb.hdf5", "../video/mmimdb", vgg=True, batch_size=128)
classes = 23
n_latent = 512
fuse = Sequential2(Concat(), MLP(2*n_latent, n_latent, n_latent//2)).cuda()
encoders = [MaxOut_MLP(512, 512, 300, n_latent, False).cuda(
), MaxOut_MLP(512, 1024, 4096, n_latent, False).cuda()]
head = Linear(n_latent//2, classes).cuda()
decoders = [MLP(n_latent, 600, 300).cuda(), MLP(n_latent, 2048, 4096).cuda()]
intermediates = [MLP(n_latent, n_latent//2, n_latent//2).cuda(),
MLP(n_latent, n_latent//2, n_latent//2).cuda()]
recon_loss = MFM_objective(2.0, [sigmloss1d, sigmloss1d], [
1.0, 1.0], criterion=torch.nn.BCEWithLogitsLoss())
train(encoders, fuse, head, traindata, validdata, 1000, decoders+intermediates, early_stop=True, task="multilabel",
objective_args_dict={"decoders": decoders, "intermediates": intermediates}, save=filename, optimtype=torch.optim.AdamW, lr=5e-3, weight_decay=0.01, objective=recon_loss)
print("Testing:")
model = torch.load(filename).cuda()
test(model, testdata, method_name="MFM", dataset="imdb",
criterion=torch.nn.BCEWithLogitsLoss(), task="multilabel")
| 1.953125 | 2 |
subeana/migrations/0001_initial.py | izumin2000/izuminapp | 0 | 6715 | # Generated by Django 4.0.2 on 2022-06-01 04:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Channel',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(default='', max_length=50)),
('isexist', models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name='Song',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(default='', max_length=50)),
('lyrics', models.CharField(default='', max_length=5000)),
('url', models.CharField(blank=True, default='', max_length=50, null=True)),
('isexist', models.BooleanField(default=True)),
('channel', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='song_channel', to='subeana.channel')),
('imitate', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='song_imitate', to='subeana.song')),
],
),
]
| 1.851563 | 2 |
bifacialvf/vf.py | shirubana/bifacialvf | 1 | 6716 | <reponame>shirubana/bifacialvf
# -*- coding: utf-8 -*-
"""
ViewFactor module - VF calculation helper files for bifacial-viewfactor
@author <NAME>
@translated to python by sayala 06/09/17
"""
# ensure python3 compatible division and printing
from __future__ import division, print_function, absolute_import
import math
import numpy as np
from sun import solarPos, sunIncident, perezComp, aOIcorrection
import logging
# TODO: set level or add formatters if more advanced logging required
LOGGER = logging.getLogger(__name__) # only used to raise errors
DTOR = math.pi / 180.0 # Factor for converting from degrees to radians
def getBackSurfaceIrradiances(rowType, maxShadow, PVbackSurface, beta, sazm,
dni, dhi, C, D, albedo, zen, azm, cellRows,
pvBackSH, rearGroundGHI, frontGroundGHI,
frontReflected, offset=0):
"""
This method calculates the AOI corrected irradiance on the back of the PV
module/panel. 11/19/2015
Added rowType and other changes to distinguish between types of rows.
4/19/2016
Added input of offset of reference cell from PV module back (in PV panel
slope lengths) for modeling Sara's reference cell measurements, should be
set to zero for PV module cell irradiances.
Added while loop so projected Xs aren't too negative causing array index
problems (<0) 12/13/2016::
        while projectedX1 < -100.0 or projectedX2 < -100.0:
            # Offset so array indexes are >= -100.0 12/13/2016
            projectedX1 += 100.0
            projectedX2 += 100.0
Parameters
----------
rowType : str
Type of row: "first", "interior", "last", or "single"
maxShadow
Maximum shadow length projected to the front(-) or rear (+) from the
front of the module
PVbackSurface
PV module back surface material type, either "glass" or "ARglass"
beta
Tilt from horizontal of the PV modules/panels (deg) (for front surface)
sazm
Surface azimuth of PV panels (deg) (for front surface)
dni
Direct normal irradiance (W/m2)
dhi
Diffuse horizontal irradiance (W/m2)
C
Ground clearance of PV panel (in PV panel slope lengths)
D
Horizontal distance between rows of PV panels (in PV panel slope
lengths)
albedo
Ground albedo
zen
Sun zenith (in radians)
azm
        Sun azimuth (in radians)
    cellRows
        Number of cell rows on the PV module/panel
    pvBackSH
Decimal fraction of the back surface of the PV panel that is shaded,
0.0 to 1.0
    rearGroundGHI : array of size [100]
        Global horizontal irradiance for each of 100 ground segments to the
        rear of the module row (W/m2)
    frontGroundGHI : array of size [100]
        Global horizontal irradiance for each of 100 ground segments to the
        front of the module row (W/m2)
frontReflected : array of size [cellRows]
Irradiance reflected from the front of the PV module/panel (W/m2) in
the row behind the one of interest
offset
Offset of reference cell from PV module back (in PV panel slope
lengths), set to zero for PV module cell irradiances
Returns
-------
backGTI : array of size [cellRows]
AOI corrected irradiance on back side of PV module/panel, one for each
cell row (W/m2)
aveGroundGHI : numeric
Average GHI on ground under PV array
Notes
-----
1-degree hemispherical segment AOI correction factor for glass (index=0)
and ARglass (index=1)
"""
backGTI = []
SegAOIcor = [
[0.057563, 0.128570, 0.199651, 0.265024, 0.324661, 0.378968, 0.428391, 0.473670, 0.514788, 0.552454,
0.586857, 0.618484, 0.647076, 0.673762, 0.698029, 0.720118, 0.740726, 0.759671, 0.776946, 0.792833,
0.807374, 0.821010, 0.833534, 0.845241, 0.855524, 0.865562, 0.874567, 0.882831, 0.890769, 0.897939,
0.904373, 0.910646, 0.916297, 0.921589, 0.926512, 0.930906, 0.935179, 0.939074, 0.942627, 0.946009,
0.949096, 0.952030, 0.954555, 0.957157, 0.959669, 0.961500, 0.963481, 0.965353, 0.967387, 0.968580,
0.970311, 0.971567, 0.972948, 0.974114, 0.975264, 0.976287, 0.977213, 0.978142, 0.979057, 0.979662,
0.980460, 0.981100, 0.981771, 0.982459, 0.982837, 0.983199, 0.983956, 0.984156, 0.984682, 0.985026,
0.985364, 0.985645, 0.985954, 0.986241, 0.986484, 0.986686, 0.986895, 0.987043, 0.987287, 0.987388,
0.987541, 0.987669, 0.987755, 0.987877, 0.987903, 0.987996, 0.988022, 0.988091, 0.988104, 0.988114,
0.988114, 0.988104, 0.988091, 0.988022, 0.987996, 0.987903, 0.987877, 0.987755, 0.987669, 0.987541,
0.987388, 0.987287, 0.987043, 0.986895, 0.986686, 0.986484, 0.986240, 0.985954, 0.985645, 0.985364,
0.985020, 0.984676, 0.984156, 0.983956, 0.983199, 0.982837, 0.982459, 0.981771, 0.981100, 0.980460,
0.979662, 0.979057, 0.978142, 0.977213, 0.976287, 0.975264, 0.974114, 0.972947, 0.971567, 0.970311,
0.968580, 0.967387, 0.965353, 0.963481, 0.961501, 0.959671, 0.957157, 0.954555, 0.952030, 0.949096,
0.946009, 0.942627, 0.939074, 0.935179, 0.930906, 0.926512, 0.921589, 0.916297, 0.910646, 0.904373,
0.897939, 0.890769, 0.882831, 0.874567, 0.865562, 0.855524, 0.845241, 0.833534, 0.821010, 0.807374,
0.792833, 0.776946, 0.759671, 0.740726, 0.720118, 0.698029, 0.673762, 0.647076, 0.618484, 0.586857,
0.552454, 0.514788, 0.473670, 0.428391, 0.378968, 0.324661, 0.265024, 0.199651, 0.128570, 0.057563],
[0.062742, 0.139913, 0.216842, 0.287226, 0.351055, 0.408796, 0.460966, 0.508397, 0.551116, 0.589915,
0.625035, 0.657029, 0.685667, 0.712150, 0.735991, 0.757467, 0.777313, 0.795374, 0.811669, 0.826496,
0.839932, 0.852416, 0.863766, 0.874277, 0.883399, 0.892242, 0.900084, 0.907216, 0.914023, 0.920103,
0.925504, 0.930744, 0.935424, 0.939752, 0.943788, 0.947313, 0.950768, 0.953860, 0.956675, 0.959339,
0.961755, 0.964039, 0.965984, 0.967994, 0.969968, 0.971283, 0.972800, 0.974223, 0.975784, 0.976647,
0.977953, 0.978887, 0.979922, 0.980773, 0.981637, 0.982386, 0.983068, 0.983759, 0.984436, 0.984855,
0.985453, 0.985916, 0.986417, 0.986934, 0.987182, 0.987435, 0.988022, 0.988146, 0.988537, 0.988792,
0.989043, 0.989235, 0.989470, 0.989681, 0.989857, 0.990006, 0.990159, 0.990263, 0.990455, 0.990515,
0.990636, 0.990731, 0.990787, 0.990884, 0.990900, 0.990971, 0.990986, 0.991042, 0.991048, 0.991057,
0.991057, 0.991048, 0.991042, 0.990986, 0.990971, 0.990900, 0.990884, 0.990787, 0.990731, 0.990636,
0.990515, 0.990455, 0.990263, 0.990159, 0.990006, 0.989857, 0.989681, 0.989470, 0.989235, 0.989043,
0.988787, 0.988532, 0.988146, 0.988022, 0.987435, 0.987182, 0.986934, 0.986417, 0.985916, 0.985453,
0.984855, 0.984436, 0.983759, 0.983068, 0.982386, 0.981637, 0.980773, 0.979920, 0.978887, 0.977953,
0.976647, 0.975784, 0.974223, 0.972800, 0.971284, 0.969970, 0.967994, 0.965984, 0.964039, 0.961755,
0.959339, 0.956675, 0.953860, 0.950768, 0.947313, 0.943788, 0.939752, 0.935424, 0.930744, 0.925504,
0.920103, 0.914023, 0.907216, 0.900084, 0.892242, 0.883399, 0.874277, 0.863766, 0.852416, 0.839932,
0.826496, 0.811669, 0.795374, 0.777313, 0.757467, 0.735991, 0.712150, 0.685667, 0.657029, 0.625035,
0.589915, 0.551116, 0.508397, 0.460966, 0.408796, 0.351055, 0.287226, 0.216842, 0.139913, 0.062742]]
# Tilt from horizontal of the PV modules/panels, in radians
beta = beta * DTOR
sazm = sazm * DTOR # Surface azimuth of PV module/panels, in radians
    # 1. Calculate and assign various parameters to be used for modeling
# irradiances
# For calling PerezComp to break diffuse into components for zero tilt
# (horizontal)
iso_dif = 0.0; circ_dif = 0.0; horiz_dif = 0.0; grd_dif = 0.0; beam = 0.0
# Call to get iso_dif for horizontal surface
ghi, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(
dni, dhi, albedo, zen, 0.0, zen)
# Isotropic irradiance from sky on horizontal surface, used later for
# determining isotropic sky component
iso_sky_dif = iso_dif
# For calling PerezComp to break diffuse into components for 90 degree tilt
# (vertical)
inc, tiltr, sazmr = sunIncident(0, 90.0, 180.0, 45.0, zen, azm)
# Call to get horiz_dif for vertical surface
vti, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(
dni, dhi, albedo, inc, tiltr, zen)
# Horizon diffuse irradiance on a vertical surface, used later for
# determining horizon brightening irradiance component
F2DHI = horiz_dif
index = -99
n2 = -99.9
if (PVbackSurface == "glass"):
# Index to use with 1-degree hemispherical segment AOI correction
# factor array
index = 0
n2 = 1.526 # Index of refraction for glass
elif (PVbackSurface == "ARglass"):
# Index to use with 1-degree hemispherical segment AOI correction
# factor array
index = 1
n2 = 1.300 # Index of refraction for ARglass
else:
raise Exception(
"Incorrect text input for PVbackSurface."
" Must be glass or ARglass.")
# Reflectance at normal incidence, Duffie and Beckman p217
Ro = math.pow((n2 - 1.0) / (n2 + 1.0), 2.0)
    # Average GHI on ground under PV array for cases when x projection exceeds
# 2*rtr
aveGroundGHI = 0.0
for i in range(0,100):
aveGroundGHI += rearGroundGHI[i] / 100.0
# Calculate x,y coordinates of bottom and top edges of PV row in back of desired PV row so that portions of sky and ground viewed by the
    # PV cell may be determined. Origin of x-y axis is the ground point below the lower front edge of the desired PV row. The row in back of
# the desired row is in the positive x direction.
h = math.sin(beta); # Vertical height of sloped PV panel (in PV panel slope lengths)
x1 = math.cos(beta); # Horizontal distance from front of panel to rear of panel (in PV panel slope lengths)
rtr = D + x1; # Row-to-row distance (in PV panel slope lengths)
    PbotX = rtr;        # x value for point on bottom edge of PV module/panel of row in back of (in PV panel slope lengths)
    PbotY = C;          # y value for point on bottom edge of PV module/panel of row in back of (in PV panel slope lengths)
    PtopX = rtr + x1;   # x value for point on top edge of PV module/panel of row in back of (in PV panel slope lengths)
    PtopY = h + C;      # y value for point on top edge of PV module/panel of row in back of (in PV panel slope lengths)
# 2. Calculate diffuse and direct component irradiances for each cell row
for i in range (0, cellRows):
        # Calculate diffuse irradiances and reflected amounts for each cell row over its field of view of 180 degrees,
        # beginning with the angle providing the uppermost view of the sky (j=0)
#PcellX = x1 * (i + 0.5) / ((double)cellRows); # x value for location of PV cell
#PcellY = C + h * (i + 0.5) / ((double)cellRows); # y value for location of PV cell
PcellX = x1 * (i + 0.5) / (cellRows) + offset * math.sin(beta); # x value for location of PV cell with OFFSET FOR SARA REFERENCE CELLS 4/26/2016
PcellY = C + h * (i + 0.5) / (cellRows) - offset * math.cos(beta); # y value for location of PV cell with OFFSET FOR SARA REFERENCE CELLS 4/26/2016
elvUP = math.atan((PtopY - PcellY) / (PtopX - PcellX)); # Elevation angle up from PV cell to top of PV module/panel, radians
elvDOWN = math.atan((PcellY - PbotY) / (PbotX - PcellX)); # Elevation angle down from PV cell to bottom of PV module/panel, radians
if (rowType == "last" or rowType == "single"): # 4/19/16 No array to the rear for these cases
elvUP = 0.0;
elvDOWN = 0.0;
#Console.WriteLine("ElvUp = 0", elvUP / DTOR);
#if (i == 0)
# Console.WriteLine("ElvDown = 0", elvDOWN / DTOR);
#123
#iStopIso = Convert.ToInt32((beta - elvUP) / DTOR); # Last whole degree in arc range that sees sky, first is 0
#Console.WriteLine("iStopIso = 0", iStopIso);
#iHorBright = Convert.ToInt32(max(0.0, 6.0 - elvUP / DTOR)); # Number of whole degrees for which horizon brightening occurs
#iStartGrd = Convert.ToInt32((beta + elvDOWN) / DTOR); # First whole degree in arc range that sees ground, last is 180
iStopIso = int(round((beta - elvUP) / DTOR)); # Last whole degree in arc range that sees sky, first is 0
#Console.WriteLine("iStopIso = 0", iStopIso);
iHorBright = int(round(max(0.0, 6.0 - elvUP / DTOR))); # Number of whole degrees for which horizon brightening occurs
iStartGrd = int(round((beta + elvDOWN) / DTOR)); # First whole degree in arc range that sees ground, last is 180
        backGTI.append(0.0)     # Initialize back GTI
for j in range (0, iStopIso): # Add sky diffuse component and horizon brightening if present
backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * iso_sky_dif; # Sky radiation
# backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * iso_sky_dif; # Sky radiation
if ((iStopIso - j) <= iHorBright): # Add horizon brightening term if seen
                backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * F2DHI / 0.052264;    # 0.052264 = 0.5 * [cos(84) - cos(90)]
                #backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * F2DHI / 0.052264;    # 0.052264 = 0.5 * [cos(84) - cos(90)]
if (rowType == "interior" or rowType == "first"): # 4/19/16 Only add reflections from PV modules for these cases
            for j in range (iStopIso, iStartGrd):   # Add reflections from PV module front surfaces
L = (PbotX - PcellX) / math.cos(elvDOWN); # Diagonal distance from cell to bottom of module in row behind
startAlpha = -(j - iStopIso) * DTOR + elvUP + elvDOWN;
stopAlpha = -(j + 1 - iStopIso) * DTOR + elvUP + elvDOWN;
m = L * math.sin(startAlpha);
theta = math.pi - elvDOWN - (math.pi / 2.0 - startAlpha) - beta;
projectedX2 = m / math.cos(theta); # Projected distance on sloped PV module
m = L * math.sin(stopAlpha);
theta = math.pi - elvDOWN - (math.pi / 2.0 - stopAlpha) - beta;
projectedX1 = m / math.cos(theta); # Projected distance on sloped PV module
projectedX1 = max(0.0, projectedX1);
#Console.WriteLine("j= 0 projected X1 = 1,6:0.000 projected X2 = 2,6:0.000", j, projectedX1, projectedX2);
PVreflectedIrr = 0.0; # Irradiance from PV module front cover reflections
deltaCell = 1.0 / cellRows; # Length of cell in sloped direction in module/panel units (dimensionless)
for k in range (0, cellRows): # Determine which cells in behind row are seen, and their reflected irradiance
cellBot = k * deltaCell; # Position of bottom of cell along PV module/panel
cellTop = (k + 1) * deltaCell; # Position of top of cell along PV module/panel
cellLengthSeen = 0.0; # Length of cell seen for this row, start with zero
if (cellBot >= projectedX1 and cellTop <= projectedX2):
cellLengthSeen = cellTop - cellBot; # Sees the whole cell
elif (cellBot <= projectedX1 and cellTop >= projectedX2):
cellLengthSeen = projectedX2 - projectedX1; # Sees portion in the middle of cell
elif (cellBot >= projectedX1 and projectedX2 > cellBot and cellTop >= projectedX2):
cellLengthSeen = projectedX2 - cellBot; # Sees bottom of cell
elif (cellBot <= projectedX1 and projectedX1 < cellTop and cellTop <= projectedX2):
cellLengthSeen = cellTop - projectedX1; # Sees top of cell
#Console.WriteLine("cell= 0 cellBot = 1,5:0.00 cellTop = 2,5:0.00 Cell length seen = 3,5:0.00", k, cellBot, cellTop, cellLengthSeen);
PVreflectedIrr += cellLengthSeen * frontReflected[k]; # Add reflected radiation for this PV cell, if seen, weight by cell length seen
PVreflectedIrr /= projectedX2 - projectedX1; # Reflected irradiance from PV modules (W/m2)
backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * PVreflectedIrr; # Radiation reflected from PV module surfaces onto back surface of module
# End of adding reflections from PV module surfaces
#Console.WriteLine("");
#if (i == 0)
#Console.WriteLine("iStartGrd = 0", iStartGrd);
for j in range (iStartGrd, 180): # Add ground reflected component
startElvDown = (j - iStartGrd) * DTOR + elvDOWN; # Start and ending down elevations for this j loop
stopElvDown = (j + 1 - iStartGrd) * DTOR + elvDOWN;
projectedX2 = PcellX + np.float64(PcellY) / math.tan(startElvDown); # Projection of ElvDown to ground in +x direction (X1 and X2 opposite nomenclature for front irradiance method)
projectedX1 = PcellX + PcellY / math.tan(stopElvDown);
            actualGroundGHI = 0.0;  # Actual ground GHI from summing array values
#if (i == 0)
# Console.WriteLine("j= 0 projected X1 = 1,6:0.0", j, 100 * projectedX1 / rtr);
if (abs(projectedX1 - projectedX2) > 0.99 * rtr):
if (rowType == "last" or rowType == "single"): # 4/19/16 No array to rear for these cases
actualGroundGHI = ghi; # Use total value if projection approximates the rtr
else:
actualGroundGHI = aveGroundGHI; # Use average value if projection approximates the rtr
else:
projectedX1 = 100.0 * projectedX1 / rtr; # Normalize projections and multiply by 100
projectedX2 = 100.0 * projectedX2 / rtr;
#Console.WriteLine("projectedX1 = 0 projectedX2 = 1", projectedX1, projectedX2);
if ((rowType == "last" or rowType == "single") and (abs(projectedX1) > 99.0 or abs(projectedX2) > 99.0)): #4/19/2016
actualGroundGHI = ghi; # Use total value if projection > rtr for "last" or "single"
else:
while (projectedX1 >= 100.0 or projectedX2 >= 100.0): # Offset so array indexes are less than 100
projectedX1 -= 100.0;
projectedX2 -= 100.0;
while (projectedX1 < -100.0 or projectedX2 < -100.0): # Offset so array indexes are >= -100.0 12/13/2016
projectedX1 += 100.0;
projectedX2 += 100.0;
#Console.WriteLine("projectedX1 = 0 projectedX2 = 1", projectedX1, projectedX2);
                    index1 = int(projectedX1 + 100.0) - 100;  # Determine indexes for use with rearGroundGHI array and frontGroundGHI array (truncates values)
                    index2 = int(projectedX2 + 100.0) - 100;  # int(1.9) = 1 and int(-1.9) = -1; int(1.9+100) - 100 = 1 and int(-1.9+100) - 100 = -2
#Console.WriteLine("index1=0 index2=1", index1, index2);
if (index1 == index2):
if (index1 < 0):
actualGroundGHI = frontGroundGHI[index1 + 100];
#actualGroundGHI = 0.0;
else:
actualGroundGHI = rearGroundGHI[index1]; # x projections in same groundGHI element THIS SEEMS TO ADD HICCUP 4/26/2016 ***************************
#actualGroundGHI = 0.0;
else:
for k in range (index1, index2+1): #for (k = index1; k <= index2; k++) # Sum the irradiances on the ground if projections are in different groundGHI elements
if (k == index1):
if (k < 0):
actualGroundGHI += frontGroundGHI[k + 100] * (k + 1.0 - projectedX1);
else:
actualGroundGHI += rearGroundGHI[k] * (k + 1.0 - projectedX1);
elif (k == index2):
if (k < 0):
actualGroundGHI += frontGroundGHI[k + 100] * (projectedX2 - k);
else:
actualGroundGHI += rearGroundGHI[k] * (projectedX2 - k);
else:
if (k < 0):
actualGroundGHI += frontGroundGHI[k + 100];
else:
actualGroundGHI += rearGroundGHI[k];
actualGroundGHI /= projectedX2 - projectedX1; # Irradiance on ground in the 1 degree field of view
#if (i == 0)
# Console.WriteLine("j=0 index1=1 index2=2 projectX1=3,5:0.0 projectX2=4,5:0.0 actualGrdGHI=5,6:0.0", j, index1, index2, projectedX1, projectedX2, actualGroundGHI);
# End of if looping to determine actualGroundGHI
backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * actualGroundGHI * albedo; # Add ground reflected component
#Console.WriteLine("actualGroundGHI = 0,6:0.0 inputGHI = 1,6:0.0 aveArrayGroundGHI = 2,6:0.0", actualGroundGHI, dhi + dni * math.cos(zen), aveGroundGHI);
        # End of j loop for adding ground reflected component
# Calculate and add direct and circumsolar irradiance components
inc, tiltr, sazmr = sunIncident(0, 180-beta / DTOR, sazm / DTOR - 180, 45.0, zen, azm) # For calling PerezComp to break diffuse into components for downward facing tilt
gtiAllpc, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(dni, dhi, albedo, inc, tiltr, zen) # Call to get components for the tilt
cellShade = pvBackSH * cellRows - i;
if (cellShade > 1.0): # Fully shaded if > 1, no shade if < 0, otherwise fractionally shaded
cellShade = 1.0;
elif (cellShade < 0.0):
cellShade = 0.0;
if (cellShade < 1.0 and inc < math.pi / 2.0): # Cell not shaded entirely and inc < 90 deg
cor = aOIcorrection(n2, inc); # Get AOI correction for beam and circumsolar
backGTI[i] += (1.0 - cellShade) * (beam + circ_dif) * cor; # Add beam and circumsolar radiation
# End of for i = 0; i < cellRows loop
return backGTI, aveGroundGHI;
# End of GetBackSurfaceIrradiances
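# A minimal, hypothetical call sketch for getBackSurfaceIrradiances. All values
# below are illustrative only; in practice zen/azm come from solarPos and the
# ground GHI arrays come from the sky-configuration and shading routines:
#
#     backGTI, aveGroundGHI = getBackSurfaceIrradiances(
#         rowType="interior", maxShadow=0.0, PVbackSurface="glass",
#         beta=20.0, sazm=180.0, dni=800.0, dhi=100.0, C=0.5, D=2.0,
#         albedo=0.2, zen=0.5, azm=math.pi, cellRows=6, pvBackSH=0.0,
#         rearGroundGHI=[900.0] * 100, frontGroundGHI=[900.0] * 100,
#         frontReflected=[50.0] * 6)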
def getFrontSurfaceIrradiances(rowType, maxShadow, PVfrontSurface, beta, sazm,
dni, dhi, C, D, albedo, zen, azm, cellRows,
pvFrontSH, frontGroundGHI):
"""
This method calculates the AOI corrected irradiance on the front of the PV
    module/panel and the irradiance reflected from the front of the PV
module/panel. 11/12/2015
Added row type and MaxShadow and changed code to accommodate 4/19/2015
Parameters
----------
rowType : str
Type of row: "first", "interior", "last", or "single"
maxShadow
Maximum shadow length projected to the front (-) or rear (+) from the
front of the module row (in PV panel slope lengths), only used for
`rowTypes` other than "interior"
PVfrontSurface
PV module front surface material type, either "glass" or "ARglass"
beta
Tilt from horizontal of the PV modules/panels (deg)
sazm
Surface azimuth of PV panels (deg)
dni
Direct normal irradiance (W/m2)
dhi
Diffuse horizontal irradiance (W/m2)
C
Ground clearance of PV panel (in PV panel slope lengths)
D
Horizontal distance between rows of PV panels (in PV panel slope
lengths)
albedo
Ground albedo
zen
Sun zenith (in radians)
azm
Sun azimuth (in radians)
pvFrontSH
Decimal fraction of the front surface of the PV panel that is shaded,
0.0 to 1.0
    frontGroundGHI : array of size [100]
Global horizontal irradiance for each of 100 ground segments in front
of the module row
Returns
-------
frontGTI : array of size [cellRows]
AOI corrected irradiance on front side of PV module/panel, one for each
cell row (W/m2)
frontReflected : array of size [cellRows]
Irradiance reflected from the front of the PV module/panel (W/m2)
aveGroundGHI : numeric
Average GHI on the ground (includes effects of shading by array) from
the array frontGroundGHI[100]
Notes
-----
1-degree hemispherical segment AOI correction factor for glass (index=0)
    and ARglass (index=1)
"""
frontGTI = []
frontReflected = []
#w, h = 2, 180;
#SegAOIcor = [[0 for x in range(w)] for y in range(h)]
SegAOIcor = ([[0.057563, 0.128570, 0.199651, 0.265024, 0.324661, 0.378968, 0.428391, 0.473670, 0.514788, 0.552454,
0.586857, 0.618484, 0.647076, 0.673762, 0.698029, 0.720118, 0.740726, 0.759671, 0.776946, 0.792833,
0.807374, 0.821010, 0.833534, 0.845241, 0.855524, 0.865562, 0.874567, 0.882831, 0.890769, 0.897939,
0.904373, 0.910646, 0.916297, 0.921589, 0.926512, 0.930906, 0.935179, 0.939074, 0.942627, 0.946009,
0.949096, 0.952030, 0.954555, 0.957157, 0.959669, 0.961500, 0.963481, 0.965353, 0.967387, 0.968580,
0.970311, 0.971567, 0.972948, 0.974114, 0.975264, 0.976287, 0.977213, 0.978142, 0.979057, 0.979662,
0.980460, 0.981100, 0.981771, 0.982459, 0.982837, 0.983199, 0.983956, 0.984156, 0.984682, 0.985026,
0.985364, 0.985645, 0.985954, 0.986241, 0.986484, 0.986686, 0.986895, 0.987043, 0.987287, 0.987388,
0.987541, 0.987669, 0.987755, 0.987877, 0.987903, 0.987996, 0.988022, 0.988091, 0.988104, 0.988114,
0.988114, 0.988104, 0.988091, 0.988022, 0.987996, 0.987903, 0.987877, 0.987755, 0.987669, 0.987541,
0.987388, 0.987287, 0.987043, 0.986895, 0.986686, 0.986484, 0.986240, 0.985954, 0.985645, 0.985364,
0.985020, 0.984676, 0.984156, 0.983956, 0.983199, 0.982837, 0.982459, 0.981771, 0.981100, 0.980460,
0.979662, 0.979057, 0.978142, 0.977213, 0.976287, 0.975264, 0.974114, 0.972947, 0.971567, 0.970311,
0.968580, 0.967387, 0.965353, 0.963481, 0.961501, 0.959671, 0.957157, 0.954555, 0.952030, 0.949096,
0.946009, 0.942627, 0.939074, 0.935179, 0.930906, 0.926512, 0.921589, 0.916297, 0.910646, 0.904373,
0.897939, 0.890769, 0.882831, 0.874567, 0.865562, 0.855524, 0.845241, 0.833534, 0.821010, 0.807374,
0.792833, 0.776946, 0.759671, 0.740726, 0.720118, 0.698029, 0.673762, 0.647076, 0.618484, 0.586857,
0.552454, 0.514788, 0.473670, 0.428391, 0.378968, 0.324661, 0.265024, 0.199651, 0.128570, 0.057563],
[0.062742, 0.139913, 0.216842, 0.287226, 0.351055, 0.408796, 0.460966, 0.508397, 0.551116, 0.589915,
0.625035, 0.657029, 0.685667, 0.712150, 0.735991, 0.757467, 0.777313, 0.795374, 0.811669, 0.826496,
0.839932, 0.852416, 0.863766, 0.874277, 0.883399, 0.892242, 0.900084, 0.907216, 0.914023, 0.920103,
0.925504, 0.930744, 0.935424, 0.939752, 0.943788, 0.947313, 0.950768, 0.953860, 0.956675, 0.959339,
0.961755, 0.964039, 0.965984, 0.967994, 0.969968, 0.971283, 0.972800, 0.974223, 0.975784, 0.976647,
0.977953, 0.978887, 0.979922, 0.980773, 0.981637, 0.982386, 0.983068, 0.983759, 0.984436, 0.984855,
0.985453, 0.985916, 0.986417, 0.986934, 0.987182, 0.987435, 0.988022, 0.988146, 0.988537, 0.988792,
0.989043, 0.989235, 0.989470, 0.989681, 0.989857, 0.990006, 0.990159, 0.990263, 0.990455, 0.990515,
0.990636, 0.990731, 0.990787, 0.990884, 0.990900, 0.990971, 0.990986, 0.991042, 0.991048, 0.991057,
0.991057, 0.991048, 0.991042, 0.990986, 0.990971, 0.990900, 0.990884, 0.990787, 0.990731, 0.990636,
0.990515, 0.990455, 0.990263, 0.990159, 0.990006, 0.989857, 0.989681, 0.989470, 0.989235, 0.989043,
0.988787, 0.988532, 0.988146, 0.988022, 0.987435, 0.987182, 0.986934, 0.986417, 0.985916, 0.985453,
0.984855, 0.984436, 0.983759, 0.983068, 0.982386, 0.981637, 0.980773, 0.979920, 0.978887, 0.977953,
0.976647, 0.975784, 0.974223, 0.972800, 0.971284, 0.969970, 0.967994, 0.965984, 0.964039, 0.961755,
0.959339, 0.956675, 0.953860, 0.950768, 0.947313, 0.943788, 0.939752, 0.935424, 0.930744, 0.925504,
0.920103, 0.914023, 0.907216, 0.900084, 0.892242, 0.883399, 0.874277, 0.863766, 0.852416, 0.839932,
0.826496, 0.811669, 0.795374, 0.777313, 0.757467, 0.735991, 0.712150, 0.685667, 0.657029, 0.625035,
0.589915, 0.551116, 0.508397, 0.460966, 0.408796, 0.351055, 0.287226, 0.216842, 0.139913, 0.062742]]);
beta = beta * DTOR # Tilt from horizontal of the PV modules/panels, in radians
sazm = sazm * DTOR # Surface azimuth of PV module/panels, in radians
    # 1. Calculate and assign various parameters to be used for modeling irradiances
iso_dif = 0.0; circ_dif = 0.0; horiz_dif = 0.0; grd_dif = 0.0; beam = 0.0; # For calling PerezComp to break diffuse into components for zero tilt (horizontal)
ghi, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(dni, dhi, albedo, zen, 0.0, zen) # Call to get iso_dif for horizontal surface
# print "PEREZCOMP1 = "
# print "ghi = ", ghi
# print "iso_dif = ", iso_dif
# print "circ_dif = ", circ_dif
# print "horiz_dif = ", horiz_dif
# print "grd_dif = ", grd_dif
# print "beam = ", beam
iso_sky_dif = iso_dif; # Isotropic irradiance from sky on horizontal surface, used later for determining isotropic sky component
inc, tiltr, sazmr = sunIncident(0, 90.0, 180.0, 45.0, zen, azm) # For calling PerezComp to break diffuse into components for 90 degree tilt (vertical)
# print "sunIncident 1."
# print "inc = ", inc
# print "tiltr = ", tiltr
# print "sazmr = ", sazmr
vti, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(dni, dhi, albedo, inc, tiltr, zen) # Call to get horiz_dif for vertical surface
# print "PEREZCOMP1 = "
# print "vti = ", vti
# print "iso_dif = ", iso_dif
# print "circ_dif = ", circ_dif
# print "horiz_dif = ", horiz_dif
# print "grd_dif = ", grd_dif
# print "beam = ", beam
F2DHI = horiz_dif; # Horizon diffuse irradiance on a vertical surface, used later for determining horizon brightening irradiance component
index = -99;
n2 = -99.9;
if (PVfrontSurface == "glass"):
index = 0; # Index to use with 1-degree hemispherical segment AOI correction factor array
n2 = 1.526; # Index of refraction for glass
elif (PVfrontSurface == "ARglass"):
index = 1; # Index to use with 1-degree hemispherical segment AOI correction factor array
n2 = 1.300; # Index of refraction for ARglass
else:
raise Exception("Incorrect text input for PVfrontSurface. Must be glass or ARglass.")
Ro = math.pow((n2 - 1.0) / (n2 + 1.0), 2.0); # Reflectance at normal incidence, Duffie and Beckman p217
    aveGroundGHI = 0.0;     # Average GHI on ground under PV array for cases when x projection exceeds 2*rtr
for i in range (0,100):
aveGroundGHI += frontGroundGHI[i] / 100.0;
# Calculate x,y coordinates of bottom and top edges of PV row in front of desired PV row so that portions of sky and ground viewed by the
    # PV cell may be determined. Origin of x-y axis is the ground point below the lower front edge of the desired PV row. The row in front of
# the desired row is in the negative x direction.
h = math.sin(beta); # Vertical height of sloped PV panel (in PV panel slope lengths)
x1 = math.cos(beta); # Horizontal distance from front of panel to rear of panel (in PV panel slope lengths)
rtr = D + x1; # Row-to-row distance (in PV panel slope lengths)
    PbotX = -rtr;   # x value for point on bottom edge of PV module/panel of row in front of (in PV panel slope lengths)
    PbotY = C;      # y value for point on bottom edge of PV module/panel of row in front of (in PV panel slope lengths)
    PtopX = -D;     # x value for point on top edge of PV module/panel of row in front of (in PV panel slope lengths)
    PtopY = h + C;  # y value for point on top edge of PV module/panel of row in front of (in PV panel slope lengths)
# 2. Calculate diffuse and direct component irradiances for each cell row
for i in range (0, cellRows):
        # Calculate diffuse irradiances and reflected amounts for each cell row over its field of view of 180 degrees,
        # beginning with the angle providing the uppermost view of the sky (j=0)
PcellX = x1 * (i + 0.5) / (cellRows); # x value for location of PV cell
PcellY = C + h * (i + 0.5) / (cellRows); # y value for location of PV cell
elvUP = math.atan((PtopY - PcellY) / (PcellX - PtopX)); # Elevation angle up from PV cell to top of PV module/panel, radians
elvDOWN = math.atan((PcellY - PbotY) / (PcellX - PbotX)); # Elevation angle down from PV cell to bottom of PV module/panel, radians
if (rowType == "first" or rowType == "single"): # 4/19/16 No array in front for these cases
elvUP = 0.0;
elvDOWN = 0.0;
#Console.WriteLine("ElvUp = 0", elvUP / DTOR);
#if (i == 0)
# Console.WriteLine("ElvDown = 0", elvDOWN / DTOR);
if math.isnan(beta):
print( "Beta is Nan")
if math.isnan(elvUP):
print( "elvUP is Nan")
if math.isnan((math.pi - beta - elvUP) / DTOR):
print( "division is Nan")
iStopIso = int(round(np.float64((math.pi - beta - elvUP)) / DTOR)) # Last whole degree in arc range that sees sky, first is 0
#Console.WriteLine("iStopIso = 0", iStopIso);
iHorBright = int(round(max(0.0, 6.0 - elvUP / DTOR))); # Number of whole degrees for which horizon brightening occurs
iStartGrd = int(round((math.pi - beta + elvDOWN) / DTOR)); # First whole degree in arc range that sees ground, last is 180
# print "iStopIso = ", iStopIso
# print "iHorBright = ", iHorBright
# print "iStartGrd = ", iStartGrd
        frontGTI.append(0.0)        # Initialize front GTI
        frontReflected.append(0.0); # Initialize reflected amount from front
for j in range (0, iStopIso): # Add sky diffuse component and horizon brightening if present
#for (j = 0; j < iStopIso; j++)
frontGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * iso_sky_dif; # Sky radiation
frontReflected[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * iso_sky_dif * (1.0 - SegAOIcor[index][j] * (1.0 - Ro)); # Reflected radiation from module
if ((iStopIso - j) <= iHorBright): # Add horizon brightening term if seen
                frontGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * F2DHI / 0.052264;    # 0.052264 = 0.5 * [cos(84) - cos(90)]
                frontReflected[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * (F2DHI / 0.052264) * (1.0 - SegAOIcor[index][j] * (1.0 - Ro));    # Reflected radiation from module
#if (i == 0)
# Console.WriteLine("iStartGrd = 0", iStartGrd);
for j in range (iStartGrd, 180): # Add ground reflected component
#(j = iStartGrd; j < 180; j++)
startElvDown = (j - iStartGrd) * DTOR + elvDOWN; # Start and ending down elevations for this j loop
stopElvDown = (j + 1 - iStartGrd) * DTOR + elvDOWN;
projectedX1 = PcellX - np.float64(PcellY) / math.tan(startElvDown); # Projection of ElvDown to ground in -x direction
projectedX2 = PcellX - PcellY / math.tan(stopElvDown);
            actualGroundGHI = 0.0;  # Actual ground GHI from summing array values
#if (i == 0)
# Console.WriteLine("j= 0 projected X1 = 1,6:0.0", j, 100 * projectedX1 / rtr);
if (abs(projectedX1 - projectedX2) > 0.99 * rtr):
if (rowType == "first" or rowType == "single"): # 4/19/16 No array in front for these cases
actualGroundGHI = ghi; # Use total value if projection approximates the rtr
else:
actualGroundGHI = aveGroundGHI; # Use average value if projection approximates the rtr
else:
projectedX1 = 100.0 * projectedX1 / rtr; # Normalize projections and multiply by 100
projectedX2 = 100.0 * projectedX2 / rtr;
if ((rowType == "first" or rowType == "single") and (abs(projectedX1) > rtr or abs(projectedX2) > rtr)): #4/19/2016
actualGroundGHI = ghi; # Use total value if projection > rtr for "first" or "single"
else:
while (projectedX1 < 0.0 or projectedX2 < 0.0): # Offset so array indexes are positive
projectedX1 += 100.0;
projectedX2 += 100.0;
index1 = int(projectedX1); # Determine indexes for use with groundGHI array (truncates values)
index2 = int(projectedX2);
if (index1 == index2):
actualGroundGHI = frontGroundGHI[index1]; # x projections in same groundGHI element
else:
for k in range (index1, index2+1): # Sum the irradiances on the ground if projections are in different groundGHI elements
#for (k = index1; k <= index2; k++)
#Console.WriteLine("index1=0 index2=1", index1,index2);
if (k == index1):
actualGroundGHI += frontGroundGHI[k] * (k + 1.0 - projectedX1);
elif (k == index2):
if (k < 100):
actualGroundGHI += frontGroundGHI[k] * (projectedX2 - k);
else:
actualGroundGHI += frontGroundGHI[k - 100] * (projectedX2 - k);
else:
if (k < 100):
actualGroundGHI += frontGroundGHI[k];
else:
actualGroundGHI += frontGroundGHI[k - 100];
actualGroundGHI /= projectedX2 - projectedX1; # Irradiance on ground in the 1 degree field of view
#if (i == 0)
# Console.WriteLine("j=0 index1=1 index2=2 projectX1=3,5:0.0 projectX2=4,5:0.0 actualGrdGHI=5,6:0.0", j, index1, index2, projectedX1, projectedX2, actualGroundGHI);
frontGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * actualGroundGHI * albedo; # Add ground reflected component
frontReflected[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * actualGroundGHI * albedo * (1.0 - SegAOIcor[index][j] * (1.0 - Ro)); # Reflected ground radiation from module
#Console.WriteLine("actualGroundGHI = 0,6:0.0 inputGHI = 1,6:0.0 aveArrayGroundGHI = 2,6:0.0", actualGroundGHI, dhi + dni * math.cos(zen), aveGroundGHI);
        # End of j loop for adding ground reflected component
# Calculate and add direct and circumsolar irradiance components
        inc, tiltr, sazmr = sunIncident(0, beta / DTOR, sazm / DTOR, 45.0, zen, azm)    # For calling PerezComp to break diffuse into components for the front-surface tilt
# print "sunIncident 2."
# print "inc = ", inc
# print "tiltr = ", tiltr
# print "sazmr = ", sazmr
# print " INCIDENT REALY NEEDED for AOI ", inc
gtiAllpc, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(dni, dhi, albedo, inc, tiltr, zen) # Call to get components for the tilt
# print "PEREZCOMP 2 = "
# print "gtiAllpc = ", vti
# print "iso_dif = ", iso_dif
# print "circ_dif = ", circ_dif
# print "horiz_dif = ", horiz_dif
# print "grd_dif = ", grd_dif
# print "beam = ", beam
cellShade = pvFrontSH * cellRows - i;
if (cellShade > 1.0): # Fully shaded if > 1, no shade if < 0, otherwise fractionally shaded
cellShade = 1.0;
elif (cellShade < 0.0):
cellShade = 0.0;
if (cellShade < 1.0 and inc < math.pi / 2.0): # Cell not shaded entirely and inc < 90 deg
cor = aOIcorrection(n2, inc); # Get AOI correction for beam and circumsolar
frontGTI[i] += (1.0 - cellShade) * (beam + circ_dif) * cor; # Add beam and circumsolar radiation
#frontReflected[i] += (1.0 - cellShade) * (beam + circ_dif) * (1.0 - cor * (1.0 - Ro)); # Reflected beam and circumsolar radiation from module
# End of for i = 0; i < cellRows loop
return aveGroundGHI, frontGTI, frontReflected;
# End of GetFrontSurfaceIrradiances
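# A minimal, hypothetical call sketch for getFrontSurfaceIrradiances (inputs
# are illustrative; frontGroundGHI would normally come from the shading and
# sky-configuration calculations):
#
#     aveGroundGHI, frontGTI, frontReflected = getFrontSurfaceIrradiances(
#         rowType="interior", maxShadow=0.0, PVfrontSurface="glass",
#         beta=20.0, sazm=180.0, dni=800.0, dhi=100.0, C=0.5, D=2.0,
#         albedo=0.2, zen=0.5, azm=math.pi, cellRows=6, pvFrontSH=0.0,
#         frontGroundGHI=[900.0] * 100)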
def getGroundShadeFactors(rowType, beta, C, D, elv, azm, sazm):
"""
This method determines if the ground is shaded from direct beam radiation
for points on the ground from the leading edge of one row of PV panels to
the leading edge of the next row of PV panels behind it. This row-to-row
dimension is divided into 100 ground segments and a ground shade factor is
returned for each ground segment, with values of 1 for shaded segments and
    values of 0 for non-shaded segments. The fractional amounts of shading of
the front and back surfaces of the PV panel are also returned. 8/20/2015
4/18/2016 - Modified to account for different row types. Because the ground
factors may now be different depending on row, they are calculated for the
row-to-row dimension to the rear of the leading module edge and to the
front of the leading edge. Also returned is the maximum shadow length
projected to the front or rear from the front of the module row
Parameters
----------
rowType : str
"first", "interior", "last", or "single"
beta
Tilt from horizontal of the PV modules/panels (deg)
C
Ground clearance of PV panel (in PV panel slope lengths)
D
Horizontal distance between rows of PV panels (in PV panel slope
lengths)
elv
Sun elevation (in radians)
azm
Sun azimuth (in radians)
sazm
Surface azimuth of PV panels (deg)
Returns
-------
pvFrontSH : numeric
Decimal fraction of the front surface of the PV panel that is shaded,
0.0 to 1.0
pvBackSH : numeric
Decimal fraction of the back surface of the PV panel that is shaded,
0.0 to 1.0
rearGroundSH : array of size [100]
Ground shade factors for ground segments to the rear, 0 = not shaded,
1 = shaded
frontGroundSH : array of size [100]
Ground shade factors for ground segments to the front, 0 = not shaded,
1 = shaded
maxShadow : numeric
Maximum shadow length projected to the front(-) or rear (+) from the
front of the module row (in PV panel slope lengths), only used later
for rowTypes other than "interior"
"""
rearGroundSH = []
frontGroundSH = []
beta = beta * DTOR # Tilt from horizontal of the PV modules/panels, in radians
    sazm = sazm * DTOR  # Surface azimuth of PV module/panels, in radians
h = math.sin(beta); # Vertical height of sloped PV panel (in PV panel slope lengths)
x1 = math.cos(beta); # Horizontal distance from front of panel to rear of panel (in PV panel slope lengths)
rtr = D + x1; # Row-to-row distance (in PV panel slope lengths)
# Divide the row-to-row spacing into 100 intervals for calculating ground shade factors
delta = rtr / 100.0;
    x = -delta / 2.0;   # Initialize horizontal dimension x to provide midpoint of intervals
    Lh = (h / math.tan(elv)) * math.cos(sazm - azm);            # Horizontal length of shadow perpendicular to row from top of module to bottom of module
    Lhc = ((h + C) / math.tan(elv)) * math.cos(sazm - azm);     # Horizontal length of shadow perpendicular to row from top of module to ground level
    Lc = (C / math.tan(elv)) * math.cos(sazm - azm);            # Horizontal length of shadow perpendicular to row from bottom of module to ground level
ss1 = 0.0; se1 = 0.0; ss2 = 0.0; se2 = 0.0; # Initialize shading start (s) and end (e) to zeros for two potential shading segments
pvFrontSH = 0.0;
pvBackSH = 0.0;
if (rowType == "interior"):
if (Lh > D): # Front side of PV module partially shaded, back completely shaded, ground completely shaded
pvFrontSH = (Lh - D) / (Lh + x1);
pvBackSH = 1.0;
ss1 = 0.0; # Ground shaded from 0.0 to rtr
se1 = rtr;
elif (Lh < -(rtr + x1)): # Back side of PV module partially shaded, front completely shaded, ground completely shaded
pvFrontSH = 1.0;
pvBackSH = (Lh + rtr + x1) / (Lh + x1);
ss1 = 0.0; # Ground shaded from 0.0 to rtr
se1 = rtr;
else: # Ground is partially shaded (I assume)
if (Lhc >= 0.0): # Shadow to rear of row, module front unshaded, back shaded
pvFrontSH = 0.0;
pvBackSH = 1.0;
Ss = Lc; # Shadow starts at Lc
Se = Lhc + x1; # Shadow ends here
while (Ss > rtr):
Ss -= rtr; # Put shadow in correct rtr space if needed
Se -= rtr;
ss1 = Ss;
se1 = Se;
if (se1 > rtr): # then need to use two shade areas
se1 = rtr;
ss2 = 0.0;
se2 = Se - rtr;
if (se2 > ss1):
# This would mean ground completely shaded, does this occur?
ss1 = 0.0; # Ground shaded from 0.0 to rtr
se1 = rtr;
else: # Shadow to front of row, either front or back might be shaded, depending on tilt and other factors
Ss = 0.0; # Shadow starts at Lc, initialize
Se = 0.0; # Shadow ends here, initialize
if (Lc < Lhc + x1):
pvFrontSH = 0.0;
pvBackSH = 1.0;
Ss = Lc; # Shadow starts at Lc
Se = Lhc + x1; # Shadow ends here
else:
pvFrontSH = 1.0;
pvBackSH = 0.0;
Ss = Lhc + x1; # Shadow starts at Lhc + x1
Se = Lc; # Shadow ends here
while (Ss < 0.0):
Ss += rtr; # Put shadow in correct rtr space if needed
Se += rtr;
ss1 = Ss;
se1 = Se;
if (se1 > rtr): # then need to use two shade areas
se1 = rtr;
ss2 = 0.0;
se2 = Se - rtr;
if (se2 > ss1):
# This would mean ground completely shaded, does this occur?
ss1 = 0.0; # Ground shaded from 0.0 to rtr
se1 = rtr;
# End of if (Lh > D) else branching
delta = rtr / 100.0;
        x = -delta / 2.0;   # Initialize horizontal dimension x to provide midpoint of intervals
#for (i = 0; i <= 99; i++)
for i in range(0,100):
x += delta;
#if ((x >= ss1 && x < se1) || (x >= ss2 && x < se2)):
if ((x >= ss1 and x < se1) or (x >= ss2 and x < se2)):
rearGroundSH.append(1); # x within a shaded interval, set groundSH to 1 to indicate shaded
frontGroundSH.append(1); # same for both front and rear
else:
                rearGroundSH.append(0);     # x not within a shaded interval, set groundSH to 0 to indicate not shaded, i.e. sunny
frontGroundSH.append(0); # same for both front and rear
#Console.WriteLine("x = 0,6:0.0000 groundSH = 1", x, groundSH[i]);
# End of if row type == "interior"
elif (rowType == "first"):
if (Lh > 0.0): # Sun is on front side of PV module
pvFrontSH = 0.0;
pvBackSH = 1.0;
ss1 = Lc; # Ground shaded from shadow of lower edge
se1 = x1 + Lhc; # to shadow of upper edge
# End of if sun on front side of PV module
elif (Lh < -(rtr + x1)): # Back side of PV module partially shaded from row to rear, front completely shaded, ground completely shaded
pvFrontSH = 1.0;
pvBackSH = (Lh + rtr + x1) / (Lh + x1);
ss1 = -rtr; # Ground shaded from -rtr to rtr
se1 = rtr;
# End of if back side of PV module partially shaded, front completely shaded, ground completely shaded
else: # Shadow to frontside of row, either front or back might be shaded, depending on tilt and other factors
if (Lc < Lhc + x1):
pvFrontSH = 0.0;
pvBackSH = 1.0;
ss1 = Lc; # Shadow starts at Lc
se1 = Lhc + x1; # Shadow ends here
else:
pvFrontSH = 1.0;
pvBackSH = 0.0;
ss1 = Lhc + x1; # Shadow starts at Lhc + x1
se1 = Lc; # Shadow ends here
# End of shadow to front of row
delta = rtr / 100.0;
        x = -delta / 2.0;   # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
if (x >= ss1 and x < se1):
rearGroundSH.append(1) # x within a shaded interval, set groundSH to 1 to indicate shaded
else:
                rearGroundSH.append(0)  # x not within a shaded interval, set groundSH to 0 to indicate not shaded, i.e. sunny
        x = -rtr - delta / 2.0;     # Initialize horizontal dimension x to provide midpoint of intervals for front interval
for i in range(0,100):
x += delta;
if (x >= ss1 and x < se1):
frontGroundSH.append(1) # x within a shaded interval, set groundSH to 1 to indicate shaded
else:
                frontGroundSH.append(0)     # x not within a shaded interval, set groundSH to 0 to indicate not shaded, i.e. sunny
# End of if row type == "first"
elif (rowType == "last"):
if (Lh > D): # Front side of PV module partially shaded, back completely shaded, ground completely shaded
pvFrontSH = (Lh - D) / (Lh + x1);
pvBackSH = 1.0;
ss1 = -rtr; # Ground shaded from -rtr to rtr
se1 = rtr;
else: # Shadow to frontside of row, either front or back might be shaded, depending on tilt and other factors
if (Lc < Lhc + x1):
pvFrontSH = 0.0;
pvBackSH = 1.0;
ss1 = Lc; # Shadow starts at Lc
se1 = Lhc + x1; # Shadow ends here
else:
pvFrontSH = 1.0;
pvBackSH = 0.0;
ss1 = Lhc + x1; # Shadow starts at Lhc + x1
se1 = Lc; # Shadow ends here
# End of shadow to front of row
delta = rtr / 100.0;
        x = -delta / 2.0;   # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
if (x >= ss1 and x < se1):
rearGroundSH.append(1); # x within a shaded interval, set groundSH to 1 to indicate shaded
else:
                rearGroundSH.append(0);     # x not within a shaded interval, set groundSH to 0 to indicate not shaded, i.e. sunny
        x = -rtr - delta / 2.0;     # Initialize horizontal dimension x to provide midpoint of intervals for front interval
for i in range(0,100):
x += delta;
if (x >= ss1 and x < se1):
frontGroundSH.append(1); # x within a shaded interval, set groundSH to 1 to indicate shaded
else:
                frontGroundSH.append(0);    # x not within a shaded interval, set groundSH to 0 to indicate not shaded, i.e. sunny
# End of if row type == "last"
elif (rowType == "single"):
if (Lh > 0.0): # Shadow to the rear
pvFrontSH = 0.0;
pvBackSH = 1.0;
ss1 = Lc; # Ground shaded from shadow of lower edge
se1 = x1 + Lhc; # to shadow of upper edge
# End of if sun on front side of PV module
else: # Shadow to frontside of row, either front or back might be shaded, depending on tilt and other factors
if (Lc < Lhc + x1):
pvFrontSH = 0.0;
pvBackSH = 1.0;
ss1 = Lc; # Shadow starts at Lc
se1 = Lhc + x1; # Shadow ends here
else:
pvFrontSH = 1.0;
pvBackSH = 0.0;
ss1 = Lhc + x1; # Shadow starts at Lhc + x1
se1 = Lc; # Shadow ends here
# End of shadow to front of row
delta = rtr / 100.0;
        x = -delta / 2.0;   # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
if (x >= ss1 and x < se1):
rearGroundSH.append(1); # x within a shaded interval, set groundSH to 1 to indicate shaded
else:
                rearGroundSH.append(0);     # x not within a shaded interval, set groundSH to 0 to indicate not shaded, i.e. sunny
        x = -rtr - delta / 2.0;     # Initialize horizontal dimension x to provide midpoint of intervals for front interval
for i in range(0,100):
x += delta;
if (x >= ss1 and x < se1):
frontGroundSH.append(1); # x within a shaded interval, set groundSH to 1 to indicate shaded
else:
                frontGroundSH.append(0);    # x not within a shaded interval, set groundSH to 0 to indicate not shaded, i.e. sunny
# End of if row type == "single"
else:
print ("ERROR: Incorrect row type not passed to function GetGroundShadedFactors ");
if (abs(ss1) > abs(se1)): # Maximum shadow length projected from the front of the PV module row
maxShadow = ss1;
else:
maxShadow = se1;
#Console.WriteLine("elv = 0,6:0.00 azm = 1,6:0.00 sazm = 2,6:0.00", elv * 180.0 / math.pi, azm * 180.0 / math.pi, sazm * 180.0 / math.pi);
#Console.WriteLine("ss1 = 0,6:0.0000 se1 = 1,6:0.0000 ss2 = 2,6:0.0000 se2 = 3,6:0.0000 rtr = 4,6:0.000", ss1, se1, ss2, se2, rtr);
#Console.WriteLine("pvFrontSH = 0,6:0.00 pvBackSH = 1,6:0.00", pvFrontSH, pvBackSH);
# End of GetGroundShadedFactors
#print "rearGroundSH", rearGroundSH[0]
return pvFrontSH, pvBackSH, maxShadow, rearGroundSH, frontGroundSH;
# End of getGroundShadeFactors
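# A minimal, hypothetical call sketch for getGroundShadeFactors; elv and azm
# are in radians (e.g. from solarPos), while beta and sazm are in degrees:
#
#     pvFrontSH, pvBackSH, maxShadow, rearGroundSH, frontGroundSH = \
#         getGroundShadeFactors(rowType="interior", beta=20.0, C=0.5, D=2.0,
#                               elv=0.6, azm=math.pi, sazm=180.0)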
def getSkyConfigurationFactors(rowType, beta, C, D):
"""
This method determines the sky configuration factors for points on the
ground from the leading edge of one row of PV panels to the leading edge of
the next row of PV panels behind it. This row-to-row dimension is divided
into 100 ground segments and a sky configuration factor is returned for
each ground segment. The sky configuration factor represents the fraction
of the isotropic diffuse sky radiation (unobstructed) that is present on
the ground when partially obstructed by the rows of PV panels. The
    equations follow those on pages in the notebook dated 8/12/2015. 8/20/2015
    4/15/2016 Modified for calculations other than just the interior rows. Row
    type is identified with the string `rowType`, with the possible values:
* first = first row of the array
* interior = interior row of array
* last = last row of the array
* single = a single row array
Because the sky configuration factors may now be different depending on
row, they are calculated for the row-to-row dimension to the rear of the
leading module edge and to the front of the leading edge.
Parameters
----------
rowType : str
"first", "interior", "last", or "single"
beta : float
Tilt from horizontal of the PV modules/panels (deg)
C : float
Ground clearance of PV panel (in PV module/panel slope lengths)
D : float
Horizontal distance between rows of PV panels (in PV module/panel slope
lengths)
Returns
-------
    rearSkyConfigFactors : array of size [100]
        Sky configuration factors to rear of leading PV module edge (decimal
        fraction)
    frontSkyConfigFactors : array of size [100]
        Sky configuration factors to front of leading PV module edge (decimal
        fraction)
Notes
-----
The horizontal distance between rows, `D`, is from the back edge of one row
to the front edge of the next, and it is not the row-to-row spacing.
"""
rearSkyConfigFactors = []
frontSkyConfigFactors = []
# Tilt from horizontal of the PV modules/panels, in radians
beta = beta * DTOR
# Vertical height of sloped PV panel (in PV panel slope lengths)
h = math.sin(beta)
# Horizontal distance from front of panel to rear of panel (in PV panel
# slope lengths)
x1 = math.cos(beta)
rtr = D + x1 # Row-to-row distance (in PV panel slope lengths)
# Forced fix for case of C = 0
# FIXME: for some reason the Config Factors go from 1 to 2 and not 0 to 1.
# TODO: investigate why this is happening in the code.
if C==0:
C=0.0000000001
if C < 0:
LOGGER.error(
"Height is below ground level. Function GetSkyConfigurationFactors"
" will continue but results might be unreliable")
# Divide the row-to-row spacing into 100 intervals and calculate
# configuration factors
delta = rtr / 100.0
if (rowType == "interior"):
# Initialize horizontal dimension x to provide midpoint of intervals
x = -delta / 2.0
for i in range(0,100):
x += delta
# <--rtr=x1+D--><--rtr=x1+D--><--rtr=x1+D-->
# |\ |\ |\ |\
# | \ ` | \ | \ /| \
# h \ ` h \ h \ / h \
# | \ ` | \ | \ / | \
# |_x1_\____D__`|_x1_\____D___|_x1_\_/_D____|_x1_\_
# | ` <------x-----/|
# C ` /
# | angA ` / angB
# *------------------------`-/---------------------
# x
# use ATAN2: 4-quadrant tangent instead of ATAN
# check 2 rows away
angA = math.atan2(h + C, (2.0 * rtr + x1 - x))
angB = math.atan2(C, (2.0 * rtr - x))
beta1 = max(angA, angB)
            # check 1 row away
angA = math.atan2(h + C, (rtr + x1 - x))
angB = math.atan2(C, (rtr - x))
beta2 = min(angA, angB)
# check 0 rows away
beta3 = max(angA, angB)
beta4 = math.atan2(h + C, (x1 - x))
beta5 = math.atan2(C, (-x))
beta6 = math.atan2(h + C, (-D - x))
            sky1 = 0; sky2 = 0; sky3 = 0
if (beta2 > beta1):
sky1 = 0.5 * (math.cos(beta1) - math.cos(beta2))
if (beta4 > beta3):
sky2 = 0.5 * (math.cos(beta3) - math.cos(beta4))
if (beta6 > beta5):
sky3 = 0.5 * (math.cos(beta5) - math.cos(beta6))
skyAll = sky1 + sky2 + sky3
# Save as arrays of values, same for both to the rear and front
rearSkyConfigFactors.append(skyAll)
frontSkyConfigFactors.append(skyAll)
# End of if "interior"
elif (rowType == "first"):
# RearSkyConfigFactors don't have a row in front, calculation of sky3
# changed, beta6 = 180 degrees
x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
angA = math.atan((h + C) / (2.0 * rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (2.0 * rtr - x));
if (angB < 0.0):
angB += math.pi;
beta1 = max(angA, angB);
angA = math.atan((h + C) / (rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (rtr - x));
if (angB < 0.0):
angB += math.pi;
beta2 = min(angA, angB);
beta3 = max(angA, angB);
beta4 = math.atan((h + C) / (x1 - x));
if (beta4 < 0.0):
beta4 += math.pi;
beta5 = math.atan(C / (-x));
if (beta5 < 0.0):
beta5 += math.pi;
beta6 = math.pi;
sky1 = 0.0; sky2 = 0.0; sky3 = 0.0;
if (beta2 > beta1):
sky1 = 0.5 * (math.cos(beta1) - math.cos(beta2));
if (beta4 > beta3):
sky2 = 0.5 * (math.cos(beta3) - math.cos(beta4));
if (beta6 > beta5):
sky3 = 0.5 * (math.cos(beta5) - math.cos(beta6));
skyAll = sky1 + sky2 + sky3;
rearSkyConfigFactors.append(skyAll); # Save as arrays of values
#Console.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
#sw.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
# frontSkyConfigFactors don't have a row in front, calculation of sky3 included as part of revised sky2,
# beta 4 set to 180 degrees
x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
angA = math.atan((h + C) / (2.0 * rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (2.0 * rtr - x));
if (angB < 0.0):
angB += math.pi;
beta1 = max(angA, angB);
angA = math.atan((h + C) / (rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (rtr - x));
if (angB < 0.0):
angB += math.pi;
beta2 = min(angA, angB);
beta3 = max(angA, angB);
beta4 = math.pi;
sky1 = 0.0; sky2 = 0.0;
if (beta2 > beta1):
sky1 = 0.5 * (math.cos(beta1) - math.cos(beta2));
if (beta4 > beta3):
sky2 = 0.5 * (math.cos(beta3) - math.cos(beta4));
skyAll = sky1 + sky2;
frontSkyConfigFactors.append(skyAll); # Save as arrays of values
#Console.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
#sw.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
# End of if "first"
elif (rowType == "last"):
        # RearSkyConfigFactors don't have a row to the rear, combine sky1 into sky2, set beta3 = 0.0
x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
beta3 = 0.0;
beta4 = math.atan((h + C) / (x1 - x));
if (beta4 < 0.0):
beta4 += math.pi;
beta5 = math.atan(C / (-x));
if (beta5 < 0.0):
beta5 += math.pi;
beta6 = math.atan((h + C) / (-D - x));
if (beta6 < 0.0):
beta6 += math.pi;
sky2 = 0.0; sky3 = 0.0;
if (beta4 > beta3):
sky2 = 0.5 * (math.cos(beta3) - math.cos(beta4));
if (beta6 > beta5):
sky3 = 0.5 * (math.cos(beta5) - math.cos(beta6));
skyAll = sky2 + sky3;
rearSkyConfigFactors.append(skyAll); # Save as arrays of values
#Console.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
#sw.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
# FrontSkyConfigFactors have beta1 = 0.0
x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
angA = math.atan((h + C) / (2.0 * rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (2.0 * rtr - x));
if (angB < 0.0):
angB += math.pi;
beta1 = max(angA, angB);
beta1 = 0.0;
angA = math.atan((h + C) / (rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (rtr - x));
if (angB < 0.0):
angB += math.pi;
beta2 = min(angA, angB);
beta3 = max(angA, angB);
beta4 = math.atan((h + C) / (x1 - x));
if (beta4 < 0.0):
beta4 += math.pi;
beta5 = math.atan(C / (-x));
if (beta5 < 0.0):
beta5 += math.pi;
beta6 = math.atan((h + C) / (-D - x));
if (beta6 < 0.0):
beta6 += math.pi;
sky1 = 0.0; sky2 = 0.0; sky3 = 0.0;
if (beta2 > beta1):
sky1 = 0.5 * (math.cos(beta1) - math.cos(beta2));
if (beta4 > beta3):
sky2 = 0.5 * (math.cos(beta3) - math.cos(beta4));
if (beta6 > beta5):
sky3 = 0.5 * (math.cos(beta5) - math.cos(beta6));
skyAll = sky1 + sky2 + sky3;
frontSkyConfigFactors.append(skyAll); # Save as arrays of values,
#Console.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
#sw.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
# End of if "last" row
elif (rowType == "single"):
        # RearSkyConfigFactors don't have a row to the rear or front, combine sky1 into sky2, set beta3 = 0.0,
# for sky3, beta6 = 180.0.
x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
beta3 = 0.0;
beta4 = math.atan((h + C) / (x1 - x));
if (beta4 < 0.0):
beta4 += math.pi;
beta5 = math.atan(C / (-x));
if (beta5 < 0.0):
beta5 += math.pi;
beta6 = math.pi;
sky2 = 0.0; sky3 = 0.0;
if (beta4 > beta3):
sky2 = 0.5 * (math.cos(beta3) - math.cos(beta4));
if (beta6 > beta5):
sky3 = 0.5 * (math.cos(beta5) - math.cos(beta6));
skyAll = sky2 + sky3;
rearSkyConfigFactors.append(skyAll); # Save as arrays of values
#Console.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
#sw.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
# FrontSkyConfigFactors have only a row to the rear, combine sky3 into sky2, set beta1 = 0, beta4 = 180
x = -delta / 2.0; # Initialize horizontal dimension x to provide midpoint of intervals
for i in range(0,100):
x += delta;
angA = math.atan((h + C) / (2.0 * rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (2.0 * rtr - x));
if (angB < 0.0):
angB += math.pi;
beta1 = max(angA, angB);
beta1 = 0.0;
angA = math.atan((h + C) / (rtr + x1 - x));
if (angA < 0.0):
angA += math.pi;
angB = math.atan(C / (rtr - x));
if (angB < 0.0):
angB += math.pi;
beta2 = min(angA, angB);
beta3 = max(angA, angB);
beta4 = math.pi;
sky1 = 0.0; sky2 = 0.0;
if (beta2 > beta1):
sky1 = 0.5 * (math.cos(beta1) - math.cos(beta2));
if (beta4 > beta3):
sky2 = 0.5 * (math.cos(beta3) - math.cos(beta4));
skyAll = sky1 + sky2;
frontSkyConfigFactors.append(skyAll); # Save as arrays of values
#Console.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
#sw.WriteLine("0,5:0.000,1,5:0.000,2,5:0.000,3,5:0.000,4,5:0.000", x, sky1, sky2, sky3, skyAll);
# End of if "single"
else:
print("ERROR: Incorrect row type not passed to function GetSkyConfigurationFactors ");
return rearSkyConfigFactors, frontSkyConfigFactors;
# End of GetSkyConfigurationFactors
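# A minimal call sketch for getSkyConfigurationFactors; each returned list
# holds 100 sky view factors across one row-to-row span (inputs illustrative):
#
#     rearSky, frontSky = getSkyConfigurationFactors("interior", beta=20.0,
#                                                    C=0.5, D=2.0)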
def rowSpacing(beta, sazm, lat, lng, tz, hour, minute):
"""
This method determines the horizontal distance D between rows of PV panels
    (in PV module/panel slope lengths) for no shading on December 21 (northern
    hemisphere) or June 21 (southern hemisphere) for a module tilt angle beta and
surface azimuth sazm, and a given latitude, longitude, and time zone and
for the time passed to the method (typically 9 am).
(Ref: the row-to-row spacing is then ``D + cos(beta)``)
8/21/2015
Parameters
----------
beta : double
Tilt from horizontal of the PV modules/panels (deg)
sazm : double
Surface azimuth of the PV modules/panels (deg)
lat : double
Site latitude (deg)
lng : double
Site longitude (deg)
tz : double
Time zone (hrs)
hour : int
hour for no shading criteria
minute: double
        minute for no shading criteria
Returns
-------
D : numeric
Horizontal distance between rows of PV panels (in PV panel slope
lengths)
"""
beta = beta * DTOR # Tilt from horizontal of the PV modules/panels, in radians
    sazm = sazm * DTOR  # Surface azimuth of PV module/panels, in radians
if lat >= 0:
[azm, zen, elv, dec, sunrise, sunset, Eo, tst] = solarPos (2014, 12, 21, hour, minute, lat, lng, tz)
else:
[azm, zen, elv, dec, sunrise, sunset, Eo, tst] = solarPos (2014, 6, 21, hour, minute, lat, lng, tz)
tst = 8.877 ##DLL Forced value
minute -= 60.0 * (tst - hour); # Adjust minute so sun position is calculated for a tst equal to the
# time passed to the function
if lat >= 0:
[azm, zen, elv, dec, sunrise, sunset, Eo, tst] = solarPos(2014, 12, 21, hour, minute, lat, lng, tz)
else:
[azm, zen, elv, dec, sunrise, sunset, Eo, tst] = solarPos(2014, 6, 21, hour, minute, lat, lng, tz)
# Console.WriteLine("tst = {0} azm = {1} elv = {2}", tst, azm * 180.0 / Math.PI, elv * 180.0 / Math.PI);
D = math.cos(sazm - azm) * math.sin(beta) / math.tan(elv)
return D
# End of RowSpacing
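# A minimal call sketch for rowSpacing using the typical 9 am no-shade
# criterion; the site coordinates here are illustrative assumptions:
#
#     D = rowSpacing(beta=20.0, sazm=180.0, lat=40.0, lng=-105.0, tz=-7.0,
#                    hour=9, minute=0.0)
#     rtr = D + math.cos(20.0 * DTOR)  # resulting row-to-row spacing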
def trackingBFvaluescalculator(beta, hub_height, r2r):
'''
1-axis tracking helper file
Parameters
----------
beta : float
Tilt from horizontal of the PV modules/panels, in radians
hub_height : float
tracker hub height
r2r : float
Row-to-row distance (in PV panel slope lengths)
Returns
-------
C : float
ground clearance of PV panel
D : float
row-to-row distance (each in PV panel slope lengths)
'''
# Created on Tue Jun 13 08:01:56 2017
# @author: sayala
beta = beta * DTOR # Tilt from horizontal of the PV modules/panels, in radians
x1 = math.cos(beta); # Horizontal distance from front of panel to rear of panel (in PV panel slope lengths)
#rtr = D + x1; # Row-to-row distance (in PV panel slope lengths)
    D = r2r - x1;       # Calculates D, the distance between rows (in PV panel slope lengths)
hm = 0.5*math.sin(beta); # vertical distance from bottom of panel to top of panel (in PV panel slope lengths)
    #C = 0.5+Cv-hm      # alternative ground-clearance formulation (unused)
    C = hub_height - hm # Ground clearance of PV panel (in slope lengths); hm carries the 0.5 factor because the panel rotates about its middle axis
return C, D
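# Worked example (assumed inputs; all lengths in panel slope lengths): with
# beta = 20 deg, hub_height = 1.5 and r2r = 2.0,
#   C = 1.5 - 0.5*sin(20*DTOR) ~= 1.329
#   D = 2.0 - cos(20*DTOR)     ~= 1.060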
| 1.929688 | 2 |
tests/test_slison.py | Habidatum/slisonner | 2 | 6717 | <reponame>Habidatum/slisonner
from slisonner import decoder, encoder
from tests import mocker
from tempfile import mkdtemp
from shutil import rmtree
def test_full_encode_decode_cycle():
temp_out_dir = mkdtemp()
slice_id = '2015-01-02 00:00:00'
x_size, y_size = 10, 16
temp_slice_path = mocker.generate_slice(x_size, y_size, 'float32')
slice_meta_encoded, slison_filepath = encoder.encode_slice_file(
filepath=temp_slice_path,
slice_duration=300,
timestamp=slice_id,
layer_id='london',
x_size=x_size,
y_size=y_size,
value_type='float32',
out_dir=temp_out_dir)
slice_data, slice_meta_decoded = decoder.decode_slison(slison_filepath)
for key, encoded_value in slice_meta_encoded.items():
assert encoded_value == slice_meta_decoded[key]
rmtree(temp_out_dir)
| 2.3125 | 2 |
cartrade/cartrade/doctype/category/category.py | vignesharumainayagam/cartrade | 0 | 6718 | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Tridots Tech Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.website.website_generator import WebsiteGenerator
class Category(WebsiteGenerator):
    def validate(self):
        # Derive the website route from the category name (lowercase, spaces removed)
        self.route = self.category_name.replace(" ", "").lower()
exercises/pyfiles/ex812_polarsincos.py | TUDelft-AE-Python/ae1205-exercises | 1 | 6719 | import matplotlib.pyplot as plt
import math
xtab = []
ytab = []
for i in range(0, 628):
# Calculate polar coordinates for provided equation
phi = float(i) / 100.0
r = 4 * math.cos(2 * phi)
# Convert to Cartesian and store in lists
x = r * math.cos(phi)
y = r * math.sin(phi)
xtab.append(x)
ytab.append(y)
plt.plot(xtab, ytab)
plt.show() | 3.15625 | 3 |
vendor/packages/logilab-astng/__pkginfo__.py | jgmize/kitsune | 2 | 6720 | # Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:<EMAIL>
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:<EMAIL>
# copyright 2003-2010 <NAME>, all rights reserved.
# contact mailto:<EMAIL>
#
# This file is part of logilab-astng.
#
# logilab-astng is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# logilab-astng is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
"""
logilab.astng packaging information
"""
distname = 'logilab-astng'
modname = 'astng'
subpackage_of = 'logilab'
numversion = (0, 20, 1)
version = '.'.join([str(num) for num in numversion])
install_requires = ['logilab-common >= 0.49.0']
pyversions = ["2.3", "2.4", "2.5", '2.6']
license = 'LGPL'
author = 'Logilab'
author_email = '<EMAIL>'
mailinglist = "mailto://%s" % author_email
web = "http://www.logilab.org/project/%s" % distname
ftp = "ftp://ftp.logilab.org/pub/%s" % modname
short_desc = "rebuild a new abstract syntax tree from Python's ast"
long_desc = """The aim of this module is to provide a common base \
representation of python source code for projects such as pychecker, pyreverse,
pylint... Well, actually the development of this library is essentially
governed by pylint's needs.
It rebuilds the tree generated by the compiler.ast [1] module (python <= 2.4)
or by the builtin _ast module (python >= 2.5) by recursively walking down the
AST and building an extended ast (let's call it astng ;). The new node classes
have additional methods and attributes for different usages.
Furthermore, astng builds partial trees by inspecting living objects."""
from os.path import join
include_dirs = [join('test', 'regrtest_data'),
join('test', 'data'), join('test', 'data2')]
| 1.132813 | 1 |
W-DCGAN/model.py | lmyybh/pytorch-networks | 0 | 6721 | <filename>W-DCGAN/model.py
import torch
import torch.nn as nn
class Generator(nn.Module):
def __init__(self, signal_size, out_channels=3):
super(Generator, self).__init__()
self.linear = nn.Linear(signal_size, 1024*4*4)
convs = []
channels = [1024, 512, 256, 128]
for i in range(1, len(channels)):
convs.append(nn.ConvTranspose2d(channels[i-1], channels[i], 2, stride=2))
convs.append(nn.BatchNorm2d(channels[i]))
convs.append(nn.LeakyReLU(0.2, inplace=True))
convs.append(nn.ConvTranspose2d(channels[-1], out_channels, 2, stride=2))
convs.append(nn.Tanh())
self.convs = nn.Sequential(*convs)
def forward(self, x):
x = self.linear(x)
x = x.view(x.size(0), 1024, 4, 4)
x = self.convs(x)
return x
class Discriminator(nn.Module):
def __init__(self):
super(Discriminator, self).__init__()
channels = [3, 32, 64, 128, 256, 512, 1024]
convs = []
for i in range(1, len(channels)):
convs.append(nn.Conv2d(channels[i-1], channels[i], 3, padding=1, stride=2))
convs.append(nn.BatchNorm2d(channels[i]))
convs.append(nn.LeakyReLU(0.2, inplace=True))
self.convs = nn.Sequential(*convs)
self.linear = nn.Linear(1024*1*1, 1)
def forward(self, x):
x = self.convs(x)
x = x.view(x.size(0), -1)
x = self.linear(x)
# x = torch.sigmoid(x)
return x
| 2.296875 | 2 |
bioinformatics/analysis/rnaseq/prepare/split_gtf_by_type.py | bioShaun/omsCabinet | 0 | 6722 | import fire
import gtfparse
from pathlib import Path
GENCODE_CATEGORY_MAP = {
'IG_C_gene': 'protein_coding',
'IG_D_gene': 'protein_coding',
'IG_J_gene': 'protein_coding',
'IG_V_gene': 'protein_coding',
'IG_LV_gene': 'protein_coding',
'TR_C_gene': 'protein_coding',
'TR_J_gene': 'protein_coding',
'TR_V_gene': 'protein_coding',
'TR_D_gene': 'protein_coding',
'TEC': 'protein_coding',
'nonsense_mediated_decay': 'protein_coding',
'non_stop_decay': 'protein_coding',
'retained_intron': 'lncRNA',
'protein_coding': 'protein_coding',
'ambiguous_orf': 'lncRNA',
'Mt_rRNA': 'ncRNA',
'Mt_tRNA': 'ncRNA',
'miRNA': 'ncRNA',
'misc_RNA': 'ncRNA',
'rRNA': 'ncRNA',
'snRNA': 'ncRNA',
'snoRNA': 'ncRNA',
'ribozyme': 'ncRNA',
'sRNA': 'ncRNA',
'scaRNA': 'ncRNA',
'scRNA': 'ncRNA',
'non_coding': 'lncRNA',
'known_ncrna': 'ncRNA',
'3prime_overlapping_ncrna': 'lncRNA',
'3prime_overlapping_ncRNA': 'lncRNA',
'vaultRNA': 'ncRNA',
'processed_transcript': 'lncRNA',
'lincRNA': 'lncRNA',
'macro_lncRNA': 'lncRNA',
'sense_intronic': 'lncRNA',
'sense_overlapping': 'lncRNA',
'antisense': 'lncRNA',
'antisense_RNA': 'lncRNA',
'bidirectional_promoter_lncRNA': 'lncRNA',
'IG_pseudogene': 'pseudogene',
'IG_D_pseudogene': 'pseudogene',
'IG_C_pseudogene': 'pseudogene',
'IG_J_pseudogene': 'pseudogene',
'IG_V_pseudogene': 'pseudogene',
'TR_V_pseudogene': 'pseudogene',
'TR_J_pseudogene': 'pseudogene',
'Mt_tRNA_pseudogene': 'pseudogene',
'tRNA_pseudogene': 'pseudogene',
'snoRNA_pseudogene': 'pseudogene',
'snRNA_pseudogene': 'pseudogene',
'scRNA_pseudogene': 'pseudogene',
'rRNA_pseudogene': 'pseudogene',
'misc_RNA_pseudogene': 'pseudogene',
'miRNA_pseudogene': 'pseudogene',
'pseudogene': 'pseudogene',
'processed_pseudogene': 'pseudogene',
'polymorphic_pseudogene': 'pseudogene',
'retrotransposed': 'pseudogene',
'transcribed_processed_pseudogene': 'pseudogene',
'transcribed_unprocessed_pseudogene': 'pseudogene',
'transcribed_unitary_pseudogene': 'pseudogene',
'translated_processed_pseudogene': 'pseudogene',
'translated_unprocessed_pseudogene': 'pseudogene',
'unitary_pseudogene': 'pseudogene',
'unprocessed_pseudogene': 'pseudogene',
'novel_lncRNA': 'lncRNA',
'TUCP': 'TUCP',
'lncRNA': 'lncRNA'
}
def simplify_gene_type(gene_type):
if gene_type in GENCODE_CATEGORY_MAP:
sim_type = GENCODE_CATEGORY_MAP.get(gene_type)
if sim_type == 'lncRNA':
sim_type = f'annotated_{sim_type}'
elif sim_type == 'ncRNA':
sim_type = f'other_{sim_type}'
else:
pass
return sim_type
else:
raise ValueError(gene_type)
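# Examples derived from the mapping above:
#   simplify_gene_type('lincRNA')        -> 'annotated_lncRNA'
#   simplify_gene_type('miRNA')          -> 'other_ncRNA'
#   simplify_gene_type('protein_coding') -> 'protein_coding'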
def dfline2gtfline(dfline):
basic_inf = dfline[:8]
basic_inf.fillna('.', inplace=True)
basic_inf.frame = '.'
basic_inf_list = [str(each) for each in basic_inf]
basic_inf_line = '\t'.join(basic_inf_list)
attr_inf = dfline[8:]
attr_inf_list = []
for key, val in attr_inf.items():
if val:
attr_inf_list.append(f'{key} "{val}";')
attr_inf_line = ' '.join(attr_inf_list)
return f'{basic_inf_line}\t{attr_inf_line}\n'
def split_gtf(gtf, outdir, novel=False):
gtf_df = gtfparse.read_gtf(gtf)
if 'gene_type' in gtf_df.columns:
gtf_df.loc[:, 'gene_biotype'] = gtf_df.gene_type
gtf_df.drop('gene_type', axis=1, inplace=True)
elif 'gene_biotype' in gtf_df.columns:
pass
else:
gtf_df.loc[:, 'gene_biotype'] = 'protein_coding'
type_label = 'gene_biotype'
if novel:
gtf_df.loc[
:, type_label] = gtf_df.loc[:, type_label].map(
GENCODE_CATEGORY_MAP)
else:
gtf_df.loc[
:, type_label] = gtf_df.loc[:, type_label].map(
simplify_gene_type)
outdir = Path(outdir)
outdir.mkdir(parents=True, exist_ok=True)
for gt, grp in gtf_df.groupby(type_label):
gt_file = outdir / f'{gt}.gtf'
with open(gt_file, 'w') as gt_inf:
for idx in grp.index:
outline = dfline2gtfline(grp.loc[idx])
gt_inf.write(outline)
if __name__ == '__main__':
fire.Fire(split_gtf)
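# CLI sketch (python-fire maps the function arguments to flags; the file names
# here are illustrative only):
#   python split_gtf_by_type.py --gtf annotation.gtf --outdir split_by_type --novel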
| 1.359375 | 1 |
rational.py | navel0810/chibi | 0 | 6723 | import math
class Q(object):
def __init__(self,a,b=1):
gcd=math.gcd(a,b)
self.a=a//gcd
self.b=b//gcd
def __repr__(self):
if self.b==1:
return str(self.a)
return f'{self.a}/{self.b}'
def __add__(self,q):
a=self.a
b=self.b
c=q.a
d=q.b
return Q(a*d+b*c,b*d)
def __sub__(self,q):
a=self.a
b=self.b
c=q.a
d=q.b
return Q(a*d-b*c,b*d)
def __mul__(self,q):
a=self.a
b=self.b
c=q.a
d=q.b
return Q(a*c,b*d)
def __truediv__(self,q):
a=self.a
b=self.b
c=q.a
d=q.b
return Q(a*d,b*c)
q1=Q(1,2)
q2=Q(1,3)
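# Expected: q1/q2 == Q(3,2), printed as "3/2", since (1/2) / (1/3) = 3/2.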
print(q1/q2) | 3.78125 | 4 |
cities_light/tests/test_import.py | jsandovalc/django-cities-light | 0 | 6724 | from __future__ import unicode_literals
import glob
import os
from dbdiff.fixture import Fixture
from .base import TestImportBase, FixtureDir
from ..settings import DATA_DIR
class TestImport(TestImportBase):
"""Load test."""
def test_single_city(self):
"""Load single city."""
fixture_dir = FixtureDir('import')
self.import_data(
fixture_dir,
'angouleme_country',
'angouleme_region',
'angouleme_subregion',
'angouleme_city',
'angouleme_translations'
)
Fixture(fixture_dir.get_file_path('angouleme.json')).assertNoDiff()
def test_single_city_zip(self):
"""Load single city."""
filelist = glob.glob(os.path.join(DATA_DIR, "angouleme_*.txt"))
for f in filelist:
os.remove(f)
fixture_dir = FixtureDir('import_zip')
self.import_data(
fixture_dir,
'angouleme_country',
'angouleme_region',
'angouleme_subregion',
'angouleme_city',
'angouleme_translations',
file_type="zip"
)
Fixture(FixtureDir('import').get_file_path('angouleme.json')).assertNoDiff()
def test_city_wrong_timezone(self):
"""Load single city with wrong timezone."""
fixture_dir = FixtureDir('import')
self.import_data(
fixture_dir,
'angouleme_country',
'angouleme_region',
'angouleme_subregion',
'angouleme_city_wtz',
'angouleme_translations'
)
Fixture(fixture_dir.get_file_path('angouleme_wtz.json')).assertNoDiff()
from ..loading import get_cities_model
city_model = get_cities_model('City')
cities = city_model.objects.all()
for city in cities:
print(city.get_timezone_info().zone)
| 2.109375 | 2 |
custom_components/hoymiles/__init__.py | Cosik/HAHoymiles | 0 | 6725 | <reponame>Cosik/HAHoymiles<gh_stars>0
import datetime
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_PLANT_ID,
)
_LOGGER = logging.getLogger(__name__)
MIN_TIME_BETWEEN_UPDATES = datetime.timedelta(seconds=600)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_PLANT_ID): cv.string,
}
) | 2.015625 | 2 |
views/auth.py | bluebibi/flask_rest | 0 | 6726 | <reponame>bluebibi/flask_rest
from flask import Blueprint, redirect, render_template, request, flash, session
from database import base
from database.base import User
from forms import UserForm, LoginForm, MyPageUserForm
from flask_login import login_required, login_user, logout_user, current_user
import requests
auth_blueprint = Blueprint('auth', __name__)
kakao_oauth = {}
@auth_blueprint.route('/my_page', methods=['GET', 'POST'])
@login_required
def _user():
form = MyPageUserForm()
q = base.db_session.query(User).filter(User.email == current_user.email)
user = q.first()
if request.method == 'POST':
if form.validate_on_submit():
user.email = request.form['email']
user.name = request.form['name']
user.set_password(request.form['password'])
user.affiliation = request.form['affiliation']
base.db_session.commit()
            flash('Your member information has been updated.')
return redirect('/auth/my_page')
return render_template("my_page.html", user=user, form=form, kakao_oauth=kakao_oauth)
def login_process(email, password):
q = base.db_session.query(User).filter(User.email == email)
user = q.first()
if user:
if user.authenticate(password):
login_result = login_user(user)
if login_result:
                print("Login succeeded for user (email: {0})!".format(current_user.email))
return '/'
else:
            flash('Please re-enter your password and try again.')
return '/auth/login'
else:
        flash('Please re-check your email and password and try again.')
return '/auth/login'
@auth_blueprint.route('/login', methods=['GET', 'POST'])
def login():
if current_user.is_authenticated:
return redirect('/')
form = LoginForm()
if request.method == 'POST':
if form.validate_on_submit():
redirect_url = login_process(form.data['email'], form.data['password'])
return redirect(redirect_url)
return render_template('login.html', form=form, current_user=current_user)
@auth_blueprint.route('kakao_oauth_redirect')
def kakao_oauth_redirect():
code = str(request.args.get('code'))
url = "https://kauth.kakao.com/oauth/token"
data = "grant_type=authorization_code" \
"&client_id=0eb67d9cd0372c01d3915bbd934b4f6d" \
"&redirect_uri=http://localhost:8080/auth/kakao_oauth_redirect" \
"&code={0}".format(code)
headers = {
"Content-Type": "application/x-www-form-urlencoded;charset=utf-8",
"Cache-Control": "no-cache"
}
response = requests.post(
url=url,
data=data,
headers=headers
)
#print("kakao_oauth_redirect", response.json())
kakao_oauth["access_token"] = response.json()["access_token"]
kakao_oauth["expires_in"] = response.json()["expires_in"]
kakao_oauth["refresh_token"] = response.json()["refresh_token"]
kakao_oauth["refresh_token_expires_in"] = response.json()["refresh_token_expires_in"]
kakao_oauth["scope"] = response.json()["scope"]
kakao_oauth["token_type"] = response.json()["token_type"]
if "kaccount_email" not in kakao_oauth or kakao_oauth['kaccount_email'] is None:
kakao_me_and_signup()
redirect_url = login_process(kakao_oauth["kaccount_email"], "1234")
return redirect(redirect_url)
def kakao_me_and_signup():
url = "https://kapi.kakao.com/v1/user/me"
headers = {
"Authorization": "Bearer {0}".format(kakao_oauth["access_token"]),
"Content-Type": "application/x-www-form-urlencoded;charset=utf-8"
}
response = requests.post(
url=url,
headers=headers
)
#print("kakao_me_and_signup", response.json())
kakao_oauth["kaccount_email"] = response.json()["kaccount_email"]
kakao_oauth["id"] = response.json()["id"]
kakao_oauth["kakao_profile_image"] = response.json()["properties"]["profile_image"]
kakao_oauth["nickname"] = response.json()["properties"]["nickname"]
kakao_oauth["kakao_thumbnail_image"] = response.json()["properties"]["thumbnail_image"]
c = base.db_session.query(User).filter(User.email == kakao_oauth["kaccount_email"]).count()
if c == 0:
user = User(name=kakao_oauth["nickname"], email=kakao_oauth["kaccount_email"], affiliation=None)
user.set_password("<PASSWORD>")
base.db_session.add(user)
base.db_session.commit()
def kakao_logout():
url = "https://kapi.kakao.com/v1/user/logout"
headers = {
"Authorization": "Bearer {0}".format(kakao_oauth["access_token"])
}
response = requests.post(
url=url,
headers=headers
)
if response.status_code == 200:
kakao_oauth["kaccount_email"] = None
kakao_oauth["id"] = None
kakao_oauth["kakao_profile_image"] = None
kakao_oauth["nickname"] = None
kakao_oauth["kakao_thumbnail_image"] = None
@auth_blueprint.route("/logout")
@login_required
def logout():
logout_user()
if kakao_oauth and "kaccount_email" in kakao_oauth:
kakao_logout()
return redirect('/')
@auth_blueprint.route('/signup', methods=['GET', 'POST'])
def signup():
form = UserForm()
if request.method == 'POST':
if form.validate_on_submit():
new_user = User()
new_user.email = request.form['email']
new_user.name = request.form['name']
new_user.set_password(request.form['password'])
new_user.affiliation = request.form['affiliation']
base.db_session.add(new_user)
base.db_session.commit()
            flash('Registration completed successfully. Please log in again with your new account details.')
return redirect('/auth/login')
return render_template("signup.html", form=form) | 2.578125 | 3 |
doc/.src/book/exer/cable_sin.py | hplgit/fem-book | 86 | 6727 | <reponame>hplgit/fem-book<gh_stars>10-100
import matplotlib.pyplot as plt
def model():
"""Solve u'' = -1, u(0)=0, u'(1)=0."""
import sympy as sym
x, c_0, c_1, = sym.symbols('x c_0 c_1')
u_x = sym.integrate(1, (x, 0, x)) + c_0
u = sym.integrate(u_x, (x, 0, x)) + c_1
r = sym.solve([u.subs(x,0) - 0,
sym.diff(u,x).subs(x, 1) - 0],
[c_0, c_1])
u = u.subs(c_0, r[c_0]).subs(c_1, r[c_1])
u = sym.simplify(sym.expand(u))
return u
def midpoint_rule(f, M=100000):
"""Integrate f(x) over [0,1] using M intervals."""
from numpy import sum, linspace
dx = 1.0/M # interval length
x = linspace(dx/2, 1-dx/2, M) # integration points
return dx*sum(f(x))
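# Example (not in the original file): the midpoint rule converges quickly on
# smooth integrands, e.g. midpoint_rule(lambda x: x**2) ~= 0.3333333 = 1/3.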
def check_integral_b():
from numpy import pi, sin
for i in range(12):
exact = 2/(pi*(2*i+1))
numerical = midpoint_rule(
f=lambda x: sin((2*i+1)*pi*x/2))
print(i, abs(exact - numerical))
def sine_sum(x, N):
s = 0
from numpy import pi, sin, zeros
u = [] # u[k] is the sum i=0,...,k
k = 0
for i in range(N+1):
s += - 16.0/((2*i+1)**3*pi**3)*sin((2*i+1)*pi*x/2)
u.append(s.copy()) # important with copy!
return u
def plot_sine_sum():
from numpy import linspace
x = linspace(0, 1, 501) # coordinates for plot
u = sine_sum(x, N=10)
u_e = 0.5*x*(x-2)
N_values = 0, 1, 10
for k in N_values:
plt.plot(x, u[k])
plt.plot(x, u_e)
plt.legend(['N=%d' % k for k in N_values] + ['exact'],
loc='upper right')
plt.xlabel('$x$'); plt.ylabel('$u$')
plt.savefig('tmpc.png'); plt.savefig('tmpc.pdf')
def check_integral_d():
from numpy import pi, sin
for i in range(24):
if i % 2 == 0:
exact = 2/(pi*(i+1))
elif (i-1) % 4 == 0:
exact = 2*2/(pi*(i+1))
else:
exact = 0
numerical = midpoint_rule(
f=lambda x: sin((i+1)*pi*x/2))
print(i, abs(exact - numerical))
def check_integral_d_sympy_answer():
from numpy import pi, sin
for i in range(12):
exact = 2/(pi*(i+1))
numerical = midpoint_rule(
f=lambda x: sin((i+1)*pi*x/2))
print(i, abs(exact - numerical))
def sine_sum_d(x, N):
s = 0
from numpy import pi, sin, zeros
u = [] # u[k] is the sum i=0,...,k
k = 0
for i in range(N+1):
if i % 2 == 0: # even i
s += - 16.0/((i+1)**3*pi**3)*sin((i+1)*pi*x/2)
elif (i-1) % 4 == 0: # 1, 5, 9, 13, 17
s += - 2*16.0/((i+1)**3*pi**3)*sin((i+1)*pi*x/2)
else:
s += 0
u.append(s.copy())
return u
def plot_sine_sum_d():
from numpy import linspace
x = linspace(0, 1, 501) # coordinates for plot
u = sine_sum_d(x, N=20)
u_e = 0.5*x*(x-2)
N_values = 0, 1, 2, 3, 20
for k in N_values:
plt.plot(x, u[k])
plt.plot(x, u_e)
plt.legend(['N=%d' % k for k in N_values] + ['exact'],
loc='upper right')
plt.xlabel('$x$'); plt.ylabel('$u$')
#plt.axis([0.9, 1, -0.52, -0.49])
plt.savefig('tmpd.png'); plt.savefig('tmpd.pdf')
if __name__ == '__main__':
import sys
print(model())
print('sine 2*i+1 integral:')
check_integral_b()
print('sine i+1 integral, sympy answer:')
check_integral_d_sympy_answer()
print('sine i+1 integral:')
check_integral_d()
#sys.exit(0)
plot_sine_sum()
plt.figure()
plot_sine_sum_d()
plt.show()
| 2.890625 | 3 |
skgmm.py | liuliu663/speaker-recognition-py3 | 0 | 6728 | from sklearn.mixture import GaussianMixture
import operator
import numpy as np
import math
class GMMSet:
def __init__(self, gmm_order = 32):
self.gmms = []
self.gmm_order = gmm_order
self.y = []
def fit_new(self, x, label):
self.y.append(label)
gmm = GaussianMixture(self.gmm_order)
gmm.fit(x)
self.gmms.append(gmm)
def gmm_score(self, gmm, x):
return np.sum(gmm.score(x))
@staticmethod
def softmax(scores):
scores_sum = sum([math.exp(i) for i in scores])
score_max = math.exp(max(scores))
return round(score_max / scores_sum, 3)
def predict_one(self, x):
scores = [self.gmm_score(gmm, x) / len(x) for gmm in self.gmms]
result = [(self.y[index], value) for (index, value) in enumerate(scores)]
p = max(result, key=operator.itemgetter(1))
softmax_score = self.softmax(scores)
return p[0], softmax_score
def before_pickle(self):
pass
def after_pickle(self):
pass
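if __name__ == '__main__':
    # Hedged demo, not part of the original module: fit two "speaker" models on
    # synthetic MFCC-like features ('alice'/'bob' are made-up labels) and score
    # a held-out chunk drawn from the second distribution.
    rng = np.random.RandomState(0)
    gmmset = GMMSet(gmm_order=4)
    gmmset.fit_new(rng.randn(200, 13), 'alice')
    gmmset.fit_new(rng.randn(200, 13) + 2.0, 'bob')
    label, confidence = gmmset.predict_one(rng.randn(50, 13) + 2.0)
    print(label, confidence)  # expected to favour 'bob'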
| 2.75 | 3 |
duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/12_features/numtrees_20/rule_6.py | apcarrik/kaggle | 0 | 6729 | def findDecision(obj): #obj[0]: Passanger, obj[1]: Time, obj[2]: Coupon, obj[3]: Gender, obj[4]: Age, obj[5]: Education, obj[6]: Occupation, obj[7]: Bar, obj[8]: Coffeehouse, obj[9]: Restaurant20to50, obj[10]: Direction_same, obj[11]: Distance
# {"feature": "Age", "instances": 51, "metric_value": 0.9662, "depth": 1}
if obj[4]>0:
# {"feature": "Occupation", "instances": 44, "metric_value": 0.9024, "depth": 2}
if obj[6]>1:
# {"feature": "Bar", "instances": 33, "metric_value": 0.9834, "depth": 3}
if obj[7]<=1.0:
# {"feature": "Education", "instances": 22, "metric_value": 0.994, "depth": 4}
if obj[5]>0:
# {"feature": "Passanger", "instances": 17, "metric_value": 0.9774, "depth": 5}
if obj[0]<=2:
# {"feature": "Time", "instances": 11, "metric_value": 0.994, "depth": 6}
if obj[1]<=2:
# {"feature": "Restaurant20to50", "instances": 8, "metric_value": 0.9544, "depth": 7}
if obj[9]>0.0:
# {"feature": "Coffeehouse", "instances": 6, "metric_value": 0.65, "depth": 8}
if obj[8]<=2.0:
return 'True'
elif obj[8]>2.0:
return 'False'
else: return 'False'
elif obj[9]<=0.0:
return 'False'
else: return 'False'
elif obj[1]>2:
return 'False'
else: return 'False'
elif obj[0]>2:
# {"feature": "Gender", "instances": 6, "metric_value": 0.65, "depth": 6}
if obj[3]>0:
return 'True'
elif obj[3]<=0:
# {"feature": "Time", "instances": 2, "metric_value": 1.0, "depth": 7}
if obj[1]<=2:
return 'True'
elif obj[1]>2:
return 'False'
else: return 'False'
else: return 'True'
else: return 'True'
elif obj[5]<=0:
return 'False'
else: return 'False'
elif obj[7]>1.0:
# {"feature": "Coupon", "instances": 11, "metric_value": 0.684, "depth": 4}
if obj[2]>2:
return 'True'
elif obj[2]<=2:
# {"feature": "Direction_same", "instances": 4, "metric_value": 1.0, "depth": 5}
if obj[10]>0:
return 'True'
elif obj[10]<=0:
return 'False'
else: return 'False'
else: return 'True'
else: return 'True'
elif obj[6]<=1:
return 'True'
else: return 'True'
elif obj[4]<=0:
# {"feature": "Passanger", "instances": 7, "metric_value": 0.5917, "depth": 2}
if obj[0]>0:
return 'False'
elif obj[0]<=0:
return 'True'
else: return 'True'
else: return 'False'
| 2.765625 | 3 |
lib/loss/__init__.py | kennethwdk/PINet | 10 | 6730 | <filename>lib/loss/__init__.py
from .heatmaploss import HeatmapLoss
from .offsetloss import OffsetLoss
from .refineloss import RefineLoss | 1.0625 | 1 |
eth2/beacon/types/historical_batch.py | AndrewBezold/trinity | 0 | 6731 | from typing import Sequence
from eth.constants import ZERO_HASH32
from eth_typing import Hash32
import ssz
from ssz.sedes import Vector, bytes32
from eth2.configs import Eth2Config
from .defaults import default_tuple, default_tuple_of_size
class HistoricalBatch(ssz.Serializable):
fields = [("block_roots", Vector(bytes32, 1)), ("state_roots", Vector(bytes32, 1))]
def __init__(
self,
*,
block_roots: Sequence[Hash32] = default_tuple,
state_roots: Sequence[Hash32] = default_tuple,
config: Eth2Config = None
) -> None:
if config:
# try to provide sane defaults
if block_roots == default_tuple:
block_roots = default_tuple_of_size(
config.SLOTS_PER_HISTORICAL_ROOT, ZERO_HASH32
)
if state_roots == default_tuple:
state_roots = default_tuple_of_size(
config.SLOTS_PER_HISTORICAL_ROOT, ZERO_HASH32
)
super().__init__(block_roots=block_roots, state_roots=state_roots)
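# Usage sketch (illustrative, assumes a populated Eth2Config named `config`):
# passing config zero-fills both root lists to SLOTS_PER_HISTORICAL_ROOT entries.
#   batch = HistoricalBatch(config=config)
#   assert len(batch.block_roots) == config.SLOTS_PER_HISTORICAL_ROOT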
| 2.234375 | 2 |
app/settings.py | nikosk/fastAPI-microservice-example- | 0 | 6732 | import os
from pydantic import BaseSettings
class Settings(BaseSettings):
DEBUG: bool
DATABASE_URL: str
class Config:
env_file = os.getenv("CONFIG_FILE", ".env")
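# Usage sketch (illustrative): instantiating Settings reads values from the env
# file named by CONFIG_FILE (default ".env"), e.g. a file containing
#   DEBUG=0
#   DATABASE_URL=postgresql://user:pass@localhost/db
# settings = Settings()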
| 2.03125 | 2 |
ADVECTOR/io_tools/create_bathymetry.py | john-science/ADVECTOR | 7 | 6733 | <filename>ADVECTOR/io_tools/create_bathymetry.py
import numpy as np
import xarray as xr
def create_bathymetry_from_land_mask(land_mask: xr.DataArray) -> xr.DataArray:
"""Method: identifies the lower depth bound of the shallowest
ocean cell (non-null) in each vertical grid column.
    :param land_mask: dimensions {time, depth, lat, lon}, boolean array, True where cell is land"""
assert np.all(land_mask.depth <= 0), "depth coordinate must be positive up"
assert np.all(
np.diff(land_mask.depth) > 0
), "depth coordinate must be sorted ascending"
# In the kernel, particles look up data based on the nearest cell-center.
# Thus cell bounds are the midpoints between each centers.
# Very top cell bound is surface, and bottom cell bounds are
# assumed to be symmetric about bottom cell center.
depth_diff = np.diff(land_mask.depth)
depth_bnds = np.concatenate(
[
land_mask.depth.values[:1] - depth_diff[0] / 2,
land_mask.depth.values[:-1] + depth_diff / 2,
[0],
]
)
bathy = (
(~land_mask)
.assign_coords({"depth": depth_bnds[:-1]})
.idxmax(dim="depth")
.where(~land_mask.isel(depth=-1), depth_bnds[-1])
)
bathy = bathy.drop(["time", "depth"])
bathy.name = "bathymetry"
bathy.attrs = {"units": "m", "positive": "up"}
return bathy
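if __name__ == "__main__":
    # Hedged demo (not from the original repo): a tiny synthetic land mask with
    # the dims/coords the function expects. Depth is positive-up and sorted
    # ascending, as the asserts require; the values are arbitrary.
    mask = np.zeros((1, 3, 2, 2), dtype=bool)
    mask[0, 0, 0, 0] = True  # the deepest cell of one column is land
    land = xr.DataArray(
        mask,
        dims=("time", "depth", "lat", "lon"),
        coords={"time": [0], "depth": [-100.0, -50.0, -10.0]},
    )
    print(create_bathymetry_from_land_mask(land))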
| 3.203125 | 3 |
unitconvert/distance.py | cr2630git/unitconvert | 0 | 6734 | <reponame>cr2630git/unitconvert
"""
A simple python module for converting kilometers to miles or vice versa.
So simple that it doesn't even have any dependencies.
"""
def kilometers_to_miles(dist_in_km):
"""
Actually does the conversion of distance from km to mi.
PARAMETERS
--------
dist_in_km: float
A distance in kilometers.
RETURNS
-------
dist_in_mi: float
The same distance converted to miles.
"""
return (dist_in_km)/1.609344
def miles_to_kilometers(dist_in_mi):
"""
Actually does the conversion of distance from mi to km.
PARAMETERS
----------
dist_in_mi: float
        A distance in miles.
RETURNS
-------
dist_in_km: float
The same distance converted to kilometers.
"""
return (dist_in_mi)*1.609344
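# Round-trip sanity check (illustrative):
#   miles_to_kilometers(kilometers_to_miles(10.0))  # -> 10.0, up to float error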
| 3.796875 | 4 |
contacts/migrations_old/0006_data_status.py | I-TECH-UW/mwachx | 3 | 6735 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import itertools as it
from django.db import models, migrations
def convert_status(apps, schema_editor):
''' Migrate Visit.skipped and ScheduledPhoneCall.skipped -> status
(pending,missed,deleted,attended)
'''
Visit = apps.get_model("contacts","Visit")
ScheduledPhoneCall = apps.get_model("contacts","ScheduledPhoneCall")
for obj in it.chain(Visit.objects.all(), ScheduledPhoneCall.objects.all()):
if obj.skipped is None:
obj.status = 'pending'
elif obj.skipped == False:
obj.status = 'attended'
elif obj.skipped == True:
obj.status = 'missed'
obj.save()
def unconvert_status(apps, schema_editor):
''' Reverse function sets skipped based on status'''
Visit = apps.get_model("contacts","Visit")
ScheduledPhoneCall = apps.get_model("contacts","ScheduledPhoneCall")
for obj in it.chain(Visit.objects.all(), ScheduledPhoneCall.objects.all()):
if obj.status == 'pending':
obj.skipped = None
elif obj.status == 'attended':
obj.skipped = False
elif obj.status == 'missed':
obj.skipped = True
obj.save()
class Migration(migrations.Migration):
dependencies = [
('contacts', '0005_auto_add_visit_status'),
]
operations = [
migrations.RunPython(convert_status,unconvert_status),
]
| 2.25 | 2 |
core/tests/test_base_time_range_controller.py | One-Green/plant-keeper-master | 2 | 6736 | import os
import sys
from datetime import time
import unittest
sys.path.append(
os.path.dirname(
        os.path.dirname(os.path.join("..", "..", "..", os.path.dirname(__file__)))
)
)
from core.controller import BaseTimeRangeController
class TestTimeRangeController(unittest.TestCase):
def test_time_range(self):
start_at = time(10, 0, 0)
end_at = time(12, 0, 0)
time_range_controller = BaseTimeRangeController(start_at, end_at)
time_now = time(11, 0, 0)
time_range_controller.set_current_time(time_now)
self.assertTrue(time_range_controller.action)
time_now = time(12, 15, 0)
time_range_controller.set_current_time(time_now)
self.assertFalse(time_range_controller.action)
if __name__ == "__main__":
unittest.main()
| 2.734375 | 3 |
generator_code/mp3_generator.py | jurganson/spingen | 0 | 6737 | <gh_stars>0
from gtts import gTTS as ttos
from pydub import AudioSegment
import os
def generate_mp3 (segments, fade_ms, speech_gain, comment_fade_ms, language = "en", output_file_name = "generated_program_sound") :
def apply_comments (exercise_audio, segment) :
new_exercise_audio = exercise_audio
for comment in segment.comments :
comment_audio = comment["comment_audio"]
comment_time_ms = comment["second"]*1000 + comment["minute"]*60000
part_01 = new_exercise_audio[comment_time_ms:comment_time_ms+len(comment_audio)+comment_fade_ms*2]
part_02 = part_01.fade(to_gain=-speech_gain, start=0, end=comment_fade_ms)
part_02 = part_02.fade(to_gain= speech_gain, start=comment_fade_ms+len(comment_audio), end=len(part_02))
part_02 = part_02.overlay(comment_audio, position=comment_fade_ms)
new_exercise_audio = new_exercise_audio[:comment_time_ms] + part_02 + new_exercise_audio[comment_time_ms+len(part_02):]
return new_exercise_audio
def append_segment (current_audio, next_segment, future_segment) :
segment_audio = next_segment.song_audio
segment_audio_faded = segment_audio - speech_gain
segment_text_audio = next_segment.text_audio
part_01 = segment_audio_faded[:len(segment_text_audio)] # First part of next segment
part_01 = current_audio[-len(segment_text_audio):].append(part_01, crossfade=len(segment_text_audio)).overlay(segment_text_audio) #
part_02 = part_01 + segment_audio_faded[len(part_01):len(part_01)+fade_ms].fade(to_gain=speech_gain, start=0, end=fade_ms) # Faded up to exercise gain
part_03 = apply_comments(segment_audio[len(part_02):len(part_02)+next_segment.get_exercise_duration_ms()+fade_ms], next_segment) # Apply comments to exercise
part_03 = part_02 + part_03.fade(to_gain=-speech_gain, start=len(part_03)-fade_ms, end=len(part_03))
part_04 = current_audio[:-len(segment_text_audio)] + part_03
if not future_segment :
part_05 = part_04.fade_out(fade_ms)
ttos(text="Program finished", lang=language, slow=False).save("output.mp3")
finish_voice = AudioSegment.from_file("output.mp3")
print("Cleaning up output.mp3")
os.remove("output.mp3")
return part_05 + finish_voice
else :
part_05 = part_04 + segment_audio_faded[len(part_03):len(part_03)+len(future_segment.text_audio)]
return part_05
print("Generating MP3 for segment 1 of " + str(len(segments)))
intro_segment_audio = segments[0].song_audio
intro_segment_text_audio = segments[0].text_audio
intro_segment_audio_faded = intro_segment_audio - speech_gain
part_01 = intro_segment_audio_faded[:fade_ms].fade_in(fade_ms)
part_02 = part_01 + intro_segment_audio_faded[len(part_01):len(part_01)+len(intro_segment_text_audio)].overlay(intro_segment_text_audio)
part_03 = part_02 + intro_segment_audio_faded[len(part_02):len(part_02)+fade_ms].fade(to_gain=speech_gain, start=0, end=fade_ms)
part_04 = apply_comments(intro_segment_audio[len(part_03):len(part_03)+segments[0].get_exercise_duration_ms()+fade_ms], segments[0])
part_04 = part_03 + part_04.fade(to_gain=-speech_gain, start=len(part_04)-fade_ms, end=len(part_04))
part_05 = part_04 + intro_segment_audio_faded[len(part_04):len(part_04)+len(segments[1].text_audio)]
program_audio = part_05
for i in range(1, len(segments)) :
print("Generating MP3 for segment " + str(i+1) + " of " + str(len(segments)))
if i+1 >= len(segments) :
program_audio = append_segment(program_audio, segments[i], None)
else :
program_audio = append_segment(program_audio, segments[i], segments[i+1])
if not os.path.exists("./output") :
os.mkdir("./output")
print("Exporting final mp3 ...")
file_path = "./output/"+output_file_name+".mp3"
program_audio.export(file_path, format="mp3")
print("Done! Exported mp3 to "+ file_path)
| 2.8125 | 3 |
relaax/algorithms/ddpg/parameter_server.py | deeplearninc/relaax | 71 | 6738 | from __future__ import absolute_import
from relaax.server.parameter_server import parameter_server_base
from relaax.server.common import session
from . import ddpg_model
class ParameterServer(parameter_server_base.ParameterServerBase):
def init_session(self):
self.session = session.Session(ddpg_model.SharedParameters())
self.session.op_initialize()
self.session.op_init_target_weights()
def n_step(self):
return self.session.op_n_step()
def score(self):
return self.session.op_score()
def get_session(self):
return self.session
| 2.015625 | 2 |
scripts/make_VFS.py | nvoron23/brython | 1 | 6739 | # -*- coding: utf-8 -*-
import json
import os
import pyminifier
try:
import io as StringIO
except ImportError:
import cStringIO as StringIO # lint:ok
# Check to see if slimit or some other minification library is installed and
# Set minify equal to slimit's minify function.
try:
import slimit
js_minify = slimit.minify
except ImportError as error:
print(error)
js_minify = slimit = None
###############################################################################
def process_unittest(filename):
"""Process a VFS filename for Brython."""
print("Generating {}".format(filename))
nb = 0
nb_err = 0
_main_root = os.path.dirname(filename)
_VFS = {}
for _mydir in ("Lib",):
for _root, _dir, _files in os.walk(os.path.join(_main_root, _mydir)):
if 'unittest' not in _root:
continue
if '__pycache__' in _root:
continue
for _file in _files:
_ext = os.path.splitext(_file)[1]
if _ext not in ('.py'):
continue
nb += 1
file_name = os.path.join(_root, _file)
try: # python 3
with open(file_name, encoding="utf-8") as file_with_data:
_data = file_with_data.read()
except Exception as reason: # python 2
with open(file_name, "r") as file_with_data:
_data = str(file_with_data.read()).decode("utf-8")
if not len(_data):
print("No data for {} ({}).".format(_file, type(_data)))
if _ext.lower() == '.py' and _data:
try:
_data = pyminifier.remove_comments_and_docstrings(
_data)
_data = pyminifier.dedent(_data)
except Exception as error:
print(error)
nb_err += 1
_vfs_filename = os.path.join(
_root, _file).replace(_main_root, '')
_vfs_filename = _vfs_filename.replace("\\", "/")
mod_name = _vfs_filename[len(_mydir) + 2:].replace('/', '.')
mod_name, ext = os.path.splitext(mod_name)
is_package = mod_name.endswith('__init__')
if is_package:
mod_name = mod_name[:-9]
_VFS[mod_name] = [_data, 1]
else:
_VFS[mod_name] = [_data]
print(("Adding %s %s" % (mod_name, _vfs_filename)))
print('%s files, %s errors' % (nb, nb_err))
with open(filename, "w") as file_to_write_VFS:
file_to_write_VFS.write('__BRYTHON__.libs = __BRYTHON__.libs || {};\n')
file_to_write_VFS.write("__BRYTHON__.=libs['unittest']=%s;\n\n" % json.dumps(_VFS))
file_to_write_VFS.write("""
__BRYTHON__.import_from_unittest function(mod_name){
var stored = __BRYTHON__.libs['unittest'][mod_name]
if(stored!==undefined){
var module_contents = stored[0]
var is_package = stored[1]
var path = 'py_unittest'
var module = {name:mod_name,__class__:$B.$ModuleDict,is_package:is_package}
if(is_package){var package=mod_name}
else{
var elts = mod_name.split('.')
elts.pop()
var package = elts.join('.')
}
$B.modules[mod_name].$package = is_package
$B.modules[mod_name].__package__ = package
run_py(module,path,module_contents)
return true
}
return null
}
// add this import function to brython by doing the following:
// <body onload="brython({custom_import_funcs:[__BRYTHON__.import_from_unittest]})">
// this will allow us to import unittest modules.
""")
def process(filename, exclude_dirs=['unittest',]):
"""Process a VFS filename for Brython."""
print("Generating {}".format(filename))
nb = 0
nb_err = 0
_main_root = os.path.dirname(filename)
_VFS = {}
for _mydir in ("libs", "Lib"):
for _root, _dir, _files in os.walk(os.path.join(_main_root, _mydir)):
#if _root.endswith('lib_migration'):
_flag=False
for _exclude in exclude_dirs:
if _exclude in _root: #_root.endswith(_exclude):
_flag=True
continue
if _flag:
continue # skip these modules
if '__pycache__' in _root:
continue
nb += 1
for _file in _files:
_ext = os.path.splitext(_file)[1]
if _ext not in ('.js', '.py'):
continue
nb += 1
with open(os.path.join(_root, _file), "r") as file_with_data:
_data = file_with_data.read()
if len(_data) == 0:
print('no data for %s' % _file)
_data = unicode('')
print(_data, type(_data))
else:
_data = _data.decode('utf-8')
if _ext in '.js':
if js_minify is not None:
try:
_data = js_minify(_data)
except Exception as error:
print(error)
elif _ext == '.py' and len(_data) > 0:
try:
_data = pyminifier.remove_comments_and_docstrings(_data)
_data = pyminifier.dedent(_data)
except Exception as error:
print(error)
nb_err += 1
_vfs_filename = os.path.join(_root, _file).replace(_main_root, '')
_vfs_filename = _vfs_filename.replace("\\", "/")
if _vfs_filename.startswith('/libs/crypto_js/rollups/'):
if _file not in ('md5.js', 'sha1.js', 'sha3.js',
'sha224.js', 'sha384.js', 'sha512.js'):
continue
mod_name = _vfs_filename[len(_mydir) + 2:].replace('/', '.')
mod_name, ext = os.path.splitext(mod_name)
is_package = mod_name.endswith('__init__')
if is_package:
mod_name = mod_name[:-9]
_VFS[mod_name] = [ext, _data, 1]
else:
_VFS[mod_name] = [ext, _data]
print(("adding %s %s" % (mod_name, _vfs_filename)))
print('%s files, %s errors' % (nb, nb_err))
with open(filename, "w") as file_to_write_VFS:
file_to_write_VFS.write('__BRYTHON__.use_VFS = true;\n')
file_to_write_VFS.write('__BRYTHON__.VFS=%s;\n\n' % json.dumps(_VFS))
###############################################################################
if __name__ == '__main__':
_main_root = os.path.join(os.getcwd(), '../src')
process(os.path.join(_main_root, "py_VFS.js"))
| 2.4375 | 2 |
main.py | rcox771/spectrum_scanner | 0 | 6740 | <gh_stars>0
from rtlsdr import RtlSdr
from contextlib import closing
from matplotlib import pyplot as plt
import numpy as np
from scipy.signal import spectrogram as scipy_spectrogram, windows  # aliased: this module defines its own spectrogram() below
from scipy import signal
from skimage.io import imsave, imread
from datetime import datetime
import json
import os
from tqdm import tqdm
import time
from queue import Queue
import asyncio
from pathlib import Path
import warnings
for cat in [RuntimeWarning, UserWarning, FutureWarning]:
warnings.filterwarnings("ignore", category=cat)
def split_images(dir="sdr_captures/specs_raw"):
jpgs = list(Path(dir).rglob('*.jpg'))
pngs = list(Path(dir).rglob('*.png'))
img_files = pngs + jpgs
img_files = list(filter(lambda x: 'chk' not in str(x), img_files))
for img_file in tqdm(img_files, desc="splitting images"):
im = imread(img_file)
shp = list(im.shape)
shp = list(filter(lambda x: x != 1, shp))
shp = np.array(shp)
dim_to_slide_over = shp.argmax()
win_size = shp[shp.argmin()]
im_size = shp[dim_to_slide_over]
for start in range(0, im_size, win_size):
stop = start + win_size
if stop >= im_size:
break
if dim_to_slide_over == 0:
chunk = im[start:stop, :]
elif dim_to_slide_over == 1:
chunk = im[:, start:stop]
file_out = str(
Path(img_file).with_suffix(f".chk_{start}_{stop}.png"))
imsave(file_out, chunk)
# y -- spectrogram, nf by nt array
# dbf -- Dynamic range of the spectrum
def adjust_dyn_range(x, mx=3, mn=10, rel_to=np.median):
r = rel_to(x)
zmax = r+mx
zmin = r-mn
x[x<zmin] = zmin
x[x>zmax] = zmax
return x
def to_spec(y, fs, fc, NFFT=1024, dbf=60, nperseg=128, normalize=True):
#w = windows.hamming(nperseg)
#window = signal.kaiser(nperseg, beta=14)
    f, t, y = scipy_spectrogram(y, detrend=None, noverlap=int(nperseg/2), nfft=NFFT, fs=fs)
y = np.fft.fftshift(y, axes=0)
if normalize:
#y = norm_spectrum(y)
y = np.sqrt(np.power(y.real, 2) + np.power(y.imag, 2))
y = 20 * np.log10(np.abs(y)/ np.abs(y).max())
y = np.abs(y)
y = y / y.max()
return y
from sklearn.preprocessing import MinMaxScaler, StandardScaler
def spectrogram(x, fs, fc, m=None, dbf=60):
if not m:
m = 1024
isreal_bool = np.isreal(x).all()
lx = len(x);
nt = (lx + m - 1) // m
x = np.append(x,np.zeros(-lx+nt*m))
x = x.reshape((int(m/2),nt*2), order='F')
x = np.concatenate((x,x),axis=0)
x = x.reshape((m*nt*2,1),order='F')
x = x[np.r_[m//2:len(x),np.ones(m//2)*(len(x)-1)].astype(int)].reshape((m,nt*2),order='F')
xmw = x * windows.hamming(m)[:,None]
t_range = [0.0, lx / fs]
if isreal_bool:
f_range = [ fc, fs / 2.0 + fc]
xmf = np.fft.fft(xmw,len(xmw),axis=0)
        xmf = xmf[0:m//2, :]  # integer division; a float index raises TypeError on Python 3
else:
f_range = [-fs / 2.0 + fc, fs / 2.0 + fc]
xmf = np.fft.fftshift( np.fft.fft( xmw ,len(xmw),axis=0), axes=0 )
f_range = np.linspace(*f_range, xmf.shape[0])
t_range = np.linspace(*t_range, xmf.shape[1])
h = xmf.shape[0]
each = int(h*.10)
xmf = xmf[each:-each, :]
xmf = np.sqrt(np.power(xmf.real, 2) + np.power(xmf.imag, 2))
xmf = np.abs(xmf)
xmf /= xmf.max()
#throw away sides
xmf = 20 * np.log10(xmf)
xmf = np.clip(xmf, -dbf, 0)
xmf = MinMaxScaler().fit_transform(StandardScaler(with_mean=True, with_std=True).fit_transform(xmf))
xmf = np.abs(xmf)
#xmf-=np.median(xmf)
xmf/=xmf.max()
print(xmf.min(), xmf.max())
return f_range, t_range, xmf
def append_json(data, path):
with open(path, 'a') as f:
f.write(json.dumps(data) + '\n')
async def stream(sdr, N):
samples_buffer = Queue()
total = 0
with tqdm(total=N, desc='sampling') as pbar:
#for i in range(10):
# time.sleep(0.1)
async for samples in sdr.stream():
# do something with samples
# ...
samples_buffer.put(samples)
#print(f'put {len(samples)} into buffer')
total += len(samples)
pbar.update(len(samples))
if total >= N:
break
# to stop streaming:
await sdr.stop()
# done
sdr.close()
return samples_buffer
def capture(fc=94.3e6,
fs=int(1e6),
gain='auto',
seconds_dwell=.4
#offset_dc=5e4
):
N = int(seconds_dwell * fs)
with closing(RtlSdr()) as sdr:
sdr.sample_rate = fs
sdr.center_freq = fc# + int(offset_dc)
sdr.gain = gain
t = datetime.now()
stamp = datetime.timestamp(t)
loop = asyncio.get_event_loop()
samples_buffer = loop.run_until_complete(stream(sdr, N))
iq_samples = np.hstack(np.array(list(samples_buffer.queue)))[:N].astype("complex64")
#iq_samples = shift_mix(iq_samples, -offset_dc, fs)
#path = os.path.join(out_dir, f'{stamp}.png')
meta = dict(
fs=fs,
fc=fc,
gain=gain,
seconds_dwell=seconds_dwell,
dt_start=stamp
)
return iq_samples, meta
def shift_mix(x, hz, fs):
return x*np.exp(1j*2*np.pi*hz/fs*np.arange(len(x)))
def save_capture(path, spec_img, meta, meta_path):
imsave(path, spec_img.T)
append_json(meta, meta_path)
def scan(
low=80e6,
high=1000e6,
repeats=10,
target_hpb=300,
):
out_dir="sdr_captures/specs_raw"
meta_path="sdr_captures/dataset.json"
os.makedirs(out_dir, exist_ok=True)
for repeat in tqdm(range(repeats), desc='repeats'):
for fs in [int(3.2e6)]:#list(map(int, (3.2e6, 2e6, 1e6))):
#for NFFT in [1024, 2048, 2048 * 2]:
fcs = []
fc = low
while fc < high:
fc += int((fs * (1/3.)))
fcs.append(fc)
fcs = np.array(fcs)
print(f'scanning {len(fcs)} total frequencies...')
for fc in tqdm(fcs, desc='fcs'):
try:
iq, meta = capture(fc=fc, fs=fs)
meta['NFFT'] = closest_power_of_two(fs / target_hpb)
meta['hpb'] = fs/meta['NFFT']
ff, tt, spec_img = spectrogram(iq, fs, fc, m=meta['NFFT'])
img_path = os.path.join(out_dir, f"{meta['dt_start']}.png")
save_capture(img_path, spec_img, meta, meta_path)
except Exception as e:
print(e)
time.sleep(1)
pass
def get_optimal_fs(max_fs=3e6):
fss = np.array([np.power(2,i) for i in range(30)])
fss = fss[fss<=max_fs][-1]
return fss
def optimal_scan(
min_freq=80e6,
max_freq=107e6,
fs=3e6,
hpb_target=4096
):
fs2 = get_optimal_fs(fs)
if fs2!=fs:
print(f'optimal fs found: {fs2}, original: {fs}')
fs = fs2
del fs2
n_bins = closest_power_of_two(fs / hpb_target)
print(f'given hz per bin target: {hpb_target} -> nfft bins per sweep: {n_bins}')
assert fs == hpb_target * n_bins
print(f'{fs} = {hpb_target} * {n_bins}')
diff_bw = max_freq-min_freq
sweeps = np.ceil(diff_bw/fs) + 1
sweep_bw = sweeps * fs
delta_bw = sweep_bw - diff_bw
adjusted_min_freq = min_freq - int(delta_bw//2)
adjusted_max_freq = max_freq + int(delta_bw//2)
assert (adjusted_max_freq-adjusted_min_freq) == sweep_bw
    print(f'optimal min/max frequencies: {adjusted_min_freq}/{adjusted_max_freq}')
min_freq = adjusted_min_freq
max_freq = adjusted_max_freq
freq_bins = np.arange(n_bins*sweeps)
fz = np.arange(min_freq, max_freq, hpb_target).astype(int)
return freq_bins, fz
def closest_power_of_two(number):
# Returns next power of two following 'number'
n = np.ceil(np.log2(number))
a = np.array([np.power(2, n - 1), np.power(2, n), np.power(2, n + 1)])
return int(a[np.argmin(np.abs(a - number))])
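# e.g. closest_power_of_two(1000) -> 1024 and closest_power_of_two(3000) -> 2048,
# since only the neighbouring powers of two are considered as candidates.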
def norm_spectrum(spec_img):
spec_img = 20 * np.log10(np.abs(spec_img) / np.max(np.abs(spec_img)))
mid = np.median(spec_img)
# high = mid + 30
# low = mid - 30
# spec_img[spec_img < low] = low
# spec_img[spec_img > high] = high
spec_img = np.abs(spec_img)
spec_img /= spec_img.max()
print('spec max:', spec_img.max(), 'spec min:', spec_img.min())
return spec_img
def parse_measure(s):
s = s.lower()
if s[-1].isalpha():
h, mod = float(s[:-1]), s[-1]
if mod == 'm':
h*=1e6
elif mod == 'k':
h*=1e3
else:
h = int(s)
return h
def string_to_linspace(s, delim=':'):
return np.arange(*list(map(parse_measure, s.split(delim))))
#string_to_linspace('24M:28M:3M')
def plot_one(fc=94.3 * 1e6, fs=3e6, target_hpb=300, seconds_dwell=.2):
NFFT = closest_power_of_two(fs / target_hpb)
iq_samples, meta = capture(fc=fc, fs=fs, seconds_dwell=seconds_dwell)
spec_img = to_spec(iq_samples, fs, fc, NFFT=NFFT)
#spec_img = norm_spectrum(spec_img)
#spec_img = np.abs(spec_img)
#spec_img /= spec_img.max()
#spec_img = 1 - spec_img
print('img shape:', spec_img.shape)
fig, ax = plt.subplots(1, 1, figsize=(14, 4))
ax.matshow(spec_img.T[:NFFT], cmap=plt.get_cmap('viridis'))
print(spec_img.T.shape)
    #plt.plot(spec_img.T[0, :])
plt.show()
if __name__ == "__main__":
#split_images()
#plot_one()
scan(repeats=3, target_hpb=1500)
split_images()
#plot_one() | 2.140625 | 2 |
test/__init__.py | donbowman/rdflib | 1,424 | 6741 | #
import os
TEST_DIR = os.path.abspath(os.path.dirname(__file__))
| 1.476563 | 1 |
examples/mnist1.py | braingineer/pyromancy | 0 | 6742 | from __future__ import print_function
import argparse
import os
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from torchvision import datasets, transforms
from tqdm import tqdm
from pyromancy import pyromq
from pyromancy.losses import LossGroup, NegativeLogLikelihood
from pyromancy.metrics import MetricGroup, Accuracy
from pyromancy.subscribers import LogSubscriber
def parse_args():
# Training settings
parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
parser.add_argument('--batch-size', type=int, default=64, metavar='N',
help='input batch size for training (default: 64)')
parser.add_argument('--epochs', type=int, default=10, metavar='N',
help='number of epochs to train (default: 10)')
parser.add_argument('--lr', type=float, default=0.01, metavar='LR',
help='learning rate (default: 0.01)')
parser.add_argument('--momentum', type=float, default=0.5, metavar='M',
help='SGD momentum (default: 0.5)')
parser.add_argument('--weight-decay', default=1e-4, type=float)
parser.add_argument('--grad-clip-norm', default=10.0, type=float)
parser.add_argument('--disable-cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--seed', type=int, default=1, metavar='S',
help='random seed (default: 1)')
# Name the experiment
parser.add_argument('--experiment-name', required=True)
parser.add_argument("--experimentdb", default=None)
parser.add_argument('--log-to-console', default=False, action='store_true')
args = parser.parse_args()
if args.experimentdb is None:
args.experimentdb = args.experiment_name + '.db'
return args
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
self.conv2_drop = nn.Dropout2d()
self.fc1 = nn.Linear(320, 50)
self.fc2 = nn.Linear(50, 10)
def forward(self, x):
x = F.relu(F.max_pool2d(self.conv1(x), 2))
x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
x = x.view(-1, 320)
x = F.relu(self.fc1(x))
x = F.dropout(x, training=self.training)
x = self.fc2(x)
        return F.log_softmax(x, dim=1)  # explicit dim avoids the implicit-dim deprecation warning
# noinspection PyCallingNonCallable,PyCallingNonCallable
def run_once(args, train_loader, test_loader):
broker = pyromq.Broker()
model = Net()
if args.cuda:
model.cuda()
training_events = pyromq.TrainingEventPublisher(broker=broker)
broker.add_subscriber(LogSubscriber(experiment_uid=args.experiment_name,
log_file=os.path.join('logs', args.experiment_name),
to_console=args.log_to_console))
opt = torch.optim.SGD(params=model.parameters(),
lr=args.lr,
weight_decay=args.weight_decay,
momentum=args.momentum)
losses = LossGroup(optimizer=opt,
grad_clip_norm=args.grad_clip_norm,
name='losses',
channel_name=pyromq.channels.METRIC_EVENTS,
broker=broker)
losses.add(NegativeLogLikelihood(name='nll',
target_name='y_target',
output_name='y_pred'),
data_target='train')
# Metrics
metrics = MetricGroup(name='metrics',
channel_name=pyromq.channels.METRIC_EVENTS,
broker=broker)
metrics.add(Accuracy(name='acc',
target_name='y_target',
output_name='y_pred'),
data_target='*')
metrics.add(NegativeLogLikelihood(name='nll',
target_name='y_target',
output_name='y_pred'),
data_target='val')
training_events.training_start()
for _ in tqdm(range(args.epochs), total=args.epochs):
training_events.epoch_start()
model.train(True)
for data, target in train_loader:
# From the original example
if args.cuda:
data, target = data.cuda(), target.cuda()
data, target = Variable(data), Variable(target)
# put the incoming batch data into a dictionary
batch_dict = {'x_data': data, 'y_target': target}
# Training Event
training_events.batch_start()
# Get model outputs
predictions = {'y_pred': model(batch_dict['x_data'])}
# Compute Metrics
metrics.compute(in_dict=batch_dict, out_dict=predictions,
data_type='train')
# Compute Losses
losses.compute(in_dict=batch_dict, out_dict=predictions,
data_type='train')
losses.step()
# Training Event
training_events.batch_end()
model.train(False)
for data, target in test_loader:
if args.cuda:
data, target = data.cuda(), target.cuda()
data, target = Variable(data, volatile=True), Variable(target)
batch_dict = {'x_data': data, 'y_target': target}
# Training Event
training_events.batch_start()
predictions = {'y_pred': model(batch_dict['x_data'])}
metrics.compute(in_dict=batch_dict,
out_dict=predictions,
data_type='val')
training_events.batch_end()
training_events.epoch_end()
def main():
args = parse_args()
args.cuda = not args.disable_cuda and torch.cuda.is_available()
torch.manual_seed(args.seed)
if args.cuda:
torch.cuda.manual_seed(args.seed)
dataload_kwargs = {}
if args.cuda:
dataload_kwargs = {'num_workers': 1, 'pin_memory': True}
train_dataset = datasets.MNIST('../data', train=True, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
]))
# noinspection PyUnresolvedReferences
train_loader = torch.utils.data.DataLoader(train_dataset,
batch_size=args.batch_size,
shuffle=True, **dataload_kwargs)
test_dataset = datasets.MNIST('../data', train=False, transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
]))
# noinspection PyUnresolvedReferences
test_loader = torch.utils.data.DataLoader(test_dataset,
batch_size=args.batch_size,
shuffle=True, **dataload_kwargs)
run_once(args, train_loader, test_loader)
if __name__ == "__main__":
main()
| 2.15625 | 2 |
selfdrive/locationd/calibrationd.py | matthewklinko/openpilot | 3 | 6743 | #!/usr/bin/env python
import os
import copy
import json
import numpy as np
import selfdrive.messaging as messaging
from selfdrive.locationd.calibration_helpers import Calibration
from selfdrive.swaglog import cloudlog
from common.params import Params
from common.transformations.model import model_height
from common.transformations.camera import view_frame_from_device_frame, get_view_frame_from_road_frame, \
eon_intrinsics, get_calib_from_vp, H, W
MPH_TO_MS = 0.44704
MIN_SPEED_FILTER = 15 * MPH_TO_MS
MAX_YAW_RATE_FILTER = np.radians(2) # per second
INPUTS_NEEDED = 300 # allow to update VP every so many frames
INPUTS_WANTED = 600 # We want a little bit more than we need for stability
WRITE_CYCLES = 400 # write every 400 cycles
VP_INIT = np.array([W/2., H/2.])
# These validity corners were chosen by looking at 1000
# and taking most extreme cases with some margin.
VP_VALIDITY_CORNERS = np.array([[W//2 - 150, 280], [W//2 + 150, 540]])
DEBUG = os.getenv("DEBUG") is not None
def is_calibration_valid(vp):
return vp[0] > VP_VALIDITY_CORNERS[0,0] and vp[0] < VP_VALIDITY_CORNERS[1,0] and \
vp[1] > VP_VALIDITY_CORNERS[0,1] and vp[1] < VP_VALIDITY_CORNERS[1,1]
class Calibrator(object):
def __init__(self, param_put=False):
self.param_put = param_put
self.vp = copy.copy(VP_INIT)
self.vps = []
self.cal_status = Calibration.UNCALIBRATED
self.write_counter = 0
self.just_calibrated = False
self.params = Params()
calibration_params = self.params.get("CalibrationParams")
if calibration_params:
try:
calibration_params = json.loads(calibration_params)
self.vp = np.array(calibration_params["vanishing_point"])
self.vps = np.tile(self.vp, (calibration_params['valid_points'], 1)).tolist()
self.update_status()
except Exception:
cloudlog.exception("CalibrationParams file found but error encountered")
def update_status(self):
start_status = self.cal_status
if len(self.vps) < INPUTS_NEEDED:
self.cal_status = Calibration.UNCALIBRATED
else:
self.cal_status = Calibration.CALIBRATED if is_calibration_valid(self.vp) else Calibration.INVALID
end_status = self.cal_status
self.just_calibrated = False
if start_status == Calibration.UNCALIBRATED and end_status == Calibration.CALIBRATED:
self.just_calibrated = True
def handle_cam_odom(self, log):
trans, rot = log.trans, log.rot
if np.linalg.norm(trans) > MIN_SPEED_FILTER and abs(rot[2]) < MAX_YAW_RATE_FILTER:
new_vp = eon_intrinsics.dot(view_frame_from_device_frame.dot(trans))
new_vp = new_vp[:2]/new_vp[2]
self.vps.append(new_vp)
self.vps = self.vps[-INPUTS_WANTED:]
self.vp = np.mean(self.vps, axis=0)
self.update_status()
self.write_counter += 1
if self.param_put and (self.write_counter % WRITE_CYCLES == 0 or self.just_calibrated):
cal_params = {"vanishing_point": list(self.vp),
"valid_points": len(self.vps)}
self.params.put("CalibrationParams", json.dumps(cal_params))
return new_vp
else:
return None
def send_data(self, pm):
calib = get_calib_from_vp(self.vp)
extrinsic_matrix = get_view_frame_from_road_frame(0, calib[1], calib[2], model_height)
cal_send = messaging.new_message()
cal_send.init('liveCalibration')
cal_send.liveCalibration.calStatus = self.cal_status
cal_send.liveCalibration.calPerc = min(len(self.vps) * 100 // INPUTS_NEEDED, 100)
cal_send.liveCalibration.extrinsicMatrix = [float(x) for x in extrinsic_matrix.flatten()]
cal_send.liveCalibration.rpyCalib = [float(x) for x in calib]
pm.send('liveCalibration', cal_send)
def calibrationd_thread(sm=None, pm=None):
if sm is None:
sm = messaging.SubMaster(['cameraOdometry'])
if pm is None:
pm = messaging.PubMaster(['liveCalibration'])
calibrator = Calibrator(param_put=True)
# buffer with all the messages that still need to be input into the kalman
while 1:
sm.update()
new_vp = calibrator.handle_cam_odom(sm['cameraOdometry'])
if DEBUG and new_vp is not None:
print 'got new vp', new_vp
calibrator.send_data(pm)
def main(sm=None, pm=None):
calibrationd_thread(sm, pm)
if __name__ == "__main__":
main()
| 2.140625 | 2 |
hunter/main.py | datastax-labs/hunter | 17 | 6744 | import argparse
import copy
import logging
import sys
from dataclasses import dataclass
from datetime import datetime, timedelta
from slack_sdk import WebClient
from typing import Dict, Optional, List
import pytz
from hunter import config
from hunter.attributes import get_back_links
from hunter.config import ConfigError, Config
from hunter.data_selector import DataSelector
from hunter.grafana import GrafanaError, Grafana, Annotation
from hunter.graphite import GraphiteError
from hunter.importer import DataImportError, Importers
from hunter.report import Report
from hunter.series import (
AnalysisOptions,
ChangePointGroup,
SeriesComparison,
compare,
AnalyzedSeries,
)
from hunter.slack import SlackNotifier, NotificationError
from hunter.test_config import TestConfigError, TestConfig, GraphiteTestConfig
from hunter.util import parse_datetime, DateFormatError, interpolate
@dataclass
class HunterError(Exception):
message: str
class Hunter:
__conf: Config
__importers: Importers
__grafana: Optional[Grafana]
__slack: Optional[SlackNotifier]
def __init__(self, conf: Config):
self.__conf = conf
self.__importers = Importers(conf)
self.__grafana = None
self.__slack = self.__maybe_create_slack_notifier()
def list_tests(self, group_names: Optional[List[str]]):
if group_names is not None:
test_names = []
for group_name in group_names:
group = self.__conf.test_groups.get(group_name)
if group is None:
raise HunterError(f"Test group not found: {group_name}")
test_names += (t.name for t in group)
else:
test_names = self.__conf.tests
for test_name in sorted(test_names):
print(test_name)
def list_test_groups(self):
for group_name in sorted(self.__conf.test_groups):
print(group_name)
def get_test(self, test_name: str) -> TestConfig:
test = self.__conf.tests.get(test_name)
if test is None:
raise HunterError(f"Test not found {test_name}")
return test
def get_tests(self, *names: str) -> List[TestConfig]:
tests = []
for name in names:
group = self.__conf.test_groups.get(name)
if group is not None:
tests += group
else:
test = self.__conf.tests.get(name)
if test is not None:
tests.append(test)
else:
raise HunterError(f"Test or group not found: {name}")
return tests
def list_metrics(self, test: TestConfig):
importer = self.__importers.get(test)
for metric_name in importer.fetch_all_metric_names(test):
print(metric_name)
def analyze(
self, test: TestConfig, selector: DataSelector, options: AnalysisOptions
) -> AnalyzedSeries:
importer = self.__importers.get(test)
series = importer.fetch_data(test, selector)
analyzed_series = series.analyze(options)
change_points = analyzed_series.change_points_by_time
report = Report(series, change_points)
print(test.name + ":")
print(report.format_log_annotated())
return analyzed_series
def __get_grafana(self) -> Grafana:
if self.__grafana is None:
self.__grafana = Grafana(self.__conf.grafana)
return self.__grafana
def update_grafana_annotations(self, test: GraphiteTestConfig, series: AnalyzedSeries):
grafana = self.__get_grafana()
begin = datetime.fromtimestamp(series.time()[0], tz=pytz.UTC)
end = datetime.fromtimestamp(series.time()[len(series.time()) - 1], tz=pytz.UTC)
logging.info(f"Fetching Grafana annotations for test {test.name}...")
tags_to_query = ["hunter", "change-point", "test:" + test.name]
old_annotations_for_test = grafana.fetch_annotations(begin, end, list(tags_to_query))
logging.info(f"Found {len(old_annotations_for_test)} annotations")
created_count = 0
for metric_name, change_points in series.change_points.items():
path = test.get_path(series.branch_name(), metric_name)
metric_tag = f"metric:{metric_name}"
tags_to_create = (
tags_to_query
+ [metric_tag]
+ test.tags
+ test.annotate
+ test.metrics[metric_name].annotate
)
substitutions = {
"TEST_NAME": test.name,
"METRIC_NAME": metric_name,
"GRAPHITE_PATH": [path],
"GRAPHITE_PATH_COMPONENTS": path.split("."),
"GRAPHITE_PREFIX": [test.prefix],
"GRAPHITE_PREFIX_COMPONENTS": test.prefix.split("."),
}
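            # interpolate() expands these substitution keys inside the user-configured
            # tags; since a substitution value can be a list, one tag template may fan
            # out into several concrete tags, hence the list accumulation below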
tmp_tags_to_create = []
for t in tags_to_create:
tmp_tags_to_create += interpolate(t, substitutions)
tags_to_create = tmp_tags_to_create
old_annotations = [a for a in old_annotations_for_test if metric_tag in a.tags]
old_annotation_times = set((a.time for a in old_annotations if a.tags))
target_annotations = []
for cp in change_points:
attributes = series.attributes_at(cp.index)
annotation_text = get_back_links(attributes)
target_annotations.append(
Annotation(
id=None,
time=datetime.fromtimestamp(cp.time, tz=pytz.UTC),
text=annotation_text,
tags=tags_to_create,
)
)
target_annotation_times = set((a.time for a in target_annotations))
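            # sync by set difference on timestamps: delete Grafana annotations whose
            # change point no longer exists, then create only the missing ones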
to_delete = [a for a in old_annotations if a.time not in target_annotation_times]
if to_delete:
logging.info(
f"Removing {len(to_delete)} annotations "
f"for test {test.name} and metric {metric_name}..."
)
grafana.delete_annotations(*(a.id for a in to_delete))
to_create = [a for a in target_annotations if a.time not in old_annotation_times]
if to_create:
logging.info(
f"Creating {len(to_create)} annotations "
f"for test {test.name} and metric {metric_name}..."
)
grafana.create_annotations(*to_create)
created_count += len(to_create)
if created_count == 0:
logging.info("All annotations up-to-date. No new annotations needed.")
else:
logging.info(f"Created {created_count} annotations.")
def remove_grafana_annotations(self, test: Optional[TestConfig], force: bool):
"""Removes all Hunter annotations (optionally for a given test) in Grafana"""
grafana = self.__get_grafana()
if test:
logging.info(f"Fetching Grafana annotations for test {test.name}...")
else:
logging.info(f"Fetching Grafana annotations...")
tags_to_query = {"hunter", "change-point"}
if test:
tags_to_query.add("test:" + test.name)
annotations = grafana.fetch_annotations(None, None, list(tags_to_query))
if not annotations:
logging.info("No annotations found.")
return
if not force:
print(
f"Are you sure to remove {len(annotations)} annotations from {grafana.url}? [y/N]"
)
decision = input().strip()
if decision.lower() != "y" and decision.lower() != "yes":
return
logging.info(f"Removing {len(annotations)} annotations...")
grafana.delete_annotations(*(a.id for a in annotations))
def regressions(
self, test: TestConfig, selector: DataSelector, options: AnalysisOptions
) -> bool:
importer = self.__importers.get(test)
# Even if user is interested only in performance difference since some point X,
# we really need to fetch some earlier points than X.
# Otherwise, if performance went down very early after X, e.g. at X + 1, we'd have
# insufficient number of data points to compute the baseline performance.
# Instead of using `since-` selector, we're fetching everything from the
# beginning and then we find the baseline performance around the time pointed by
# the original selector.
since_version = selector.since_version
since_commit = selector.since_commit
since_time = selector.since_time
baseline_selector = copy.deepcopy(selector)
baseline_selector.last_n_points = sys.maxsize
baseline_selector.branch = None
baseline_selector.since_version = None
baseline_selector.since_commit = None
baseline_selector.since_time = since_time - timedelta(days=30)
baseline_series = importer.fetch_data(test, baseline_selector)
if since_version:
baseline_index = baseline_series.find_by_attribute("version", since_version)
if not baseline_index:
raise HunterError(f"No runs of test {test.name} with version {since_version}")
baseline_index = max(baseline_index)
elif since_commit:
baseline_index = baseline_series.find_by_attribute("commit", since_commit)
if not baseline_index:
raise HunterError(f"No runs of test {test.name} with commit {since_commit}")
baseline_index = max(baseline_index)
else:
baseline_index = baseline_series.find_first_not_earlier_than(since_time)
baseline_series = baseline_series.analyze()
if selector.branch:
target_series = importer.fetch_data(test, selector).analyze()
else:
target_series = baseline_series
cmp = compare(baseline_series, baseline_index, target_series, target_series.len())
regressions = []
for metric_name, stats in cmp.stats.items():
direction = baseline_series.metric(metric_name).direction
m1 = stats.mean_1
m2 = stats.mean_2
change_percent = stats.forward_rel_change() * 100.0
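            # `direction` encodes whether larger values are better (+1) or worse (-1),
            # so the comparison below flags a worsened mean regardless of the metric's
            # polarity; the p-value gate filters out statistically insignificant shifts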
if m2 * direction < m1 * direction and stats.pvalue < options.max_pvalue:
regressions.append(
" {:16}: {:#8.3g} --> {:#8.3g} ({:+6.1f}%)".format(
metric_name, m1, m2, change_percent
)
)
if regressions:
print(f"{test.name}:")
for r in regressions:
print(r)
else:
print(f"{test.name}: OK")
return len(regressions) > 0
def __maybe_create_slack_notifier(self):
if not self.__conf.slack:
return None
return SlackNotifier(WebClient(token=self.__conf.slack.bot_token))
def notify_slack(
self,
test_change_points: Dict[str, AnalyzedSeries],
selector: DataSelector,
channels: List[str],
since: datetime,
):
if not self.__slack:
logging.error(
"Slack definition is missing from the configuration, cannot send notification"
)
return
self.__slack.notify(test_change_points, selector=selector, channels=channels, since=since)
def validate(self):
valid = True
unique_metrics = set()
for name, test in self.__conf.tests.items():
logging.info("Checking {}".format(name))
test_metrics = test.fully_qualified_metric_names()
for test_metric in test_metrics:
if test_metric not in unique_metrics:
unique_metrics.add(test_metric)
else:
valid = False
logging.error(f"Found duplicated metric: {test_metric}")
try:
importer = self.__importers.get(test)
series = importer.fetch_data(test)
for metric, metric_data in series.data.items():
if not metric_data:
logging.warning(f"Test's metric does not have data: {name} {metric}")
except Exception as err:
logging.error(f"Invalid test definition: {name}\n{repr(err)}\n")
valid = False
logging.info(f"Validation finished: {'VALID' if valid else 'INVALID'}")
if not valid:
exit(1)
def setup_data_selector_parser(parser: argparse.ArgumentParser):
parser.add_argument(
"--branch", metavar="STRING", dest="branch", help="name of the branch", nargs="?"
)
parser.add_argument(
"--metrics",
metavar="LIST",
dest="metrics",
help="a comma-separated list of metrics to analyze",
)
parser.add_argument(
"--attrs",
metavar="LIST",
dest="attributes",
help="a comma-separated list of attribute names associated with the runs "
"(e.g. commit, branch, version); "
"if not specified, it will be automatically filled based on available information",
)
since_group = parser.add_mutually_exclusive_group()
since_group.add_argument(
"--since-commit",
metavar="STRING",
dest="since_commit",
help="the commit at the start of the time span to analyze",
)
since_group.add_argument(
"--since-version",
metavar="STRING",
dest="since_version",
help="the version at the start of the time span to analyze",
)
since_group.add_argument(
"--since",
metavar="DATE",
dest="since_time",
help="the start of the time span to analyze; "
"accepts ISO, and human-readable dates like '10 weeks ago'",
)
until_group = parser.add_mutually_exclusive_group()
until_group.add_argument(
"--until-commit",
metavar="STRING",
dest="until_commit",
help="the commit at the end of the time span to analyze",
)
until_group.add_argument(
"--until-version",
metavar="STRING",
dest="until_version",
help="the version at the end of the time span to analyze",
)
until_group.add_argument(
"--until",
metavar="DATE",
dest="until_time",
help="the end of the time span to analyze; same syntax as --since",
)
parser.add_argument(
"--last",
type=int,
metavar="COUNT",
dest="last_n_points",
help="the number of data points to take from the end of the series"
)
def data_selector_from_args(args: argparse.Namespace) -> DataSelector:
data_selector = DataSelector()
if args.branch:
data_selector.branch = args.branch
if args.metrics is not None:
data_selector.metrics = list(args.metrics.split(","))
if args.attributes is not None:
data_selector.attributes = list(args.attributes.split(","))
if args.since_commit is not None:
data_selector.since_commit = args.since_commit
if args.since_version is not None:
data_selector.since_version = args.since_version
if args.since_time is not None:
data_selector.since_time = parse_datetime(args.since_time)
if args.until_commit is not None:
data_selector.until_commit = args.until_commit
if args.until_version is not None:
data_selector.until_version = args.until_version
if args.until_time is not None:
data_selector.until_time = parse_datetime(args.until_time)
if args.last_n_points is not None:
data_selector.last_n_points = args.last_n_points
return data_selector
def setup_analysis_options_parser(parser: argparse.ArgumentParser):
parser.add_argument(
"-P, --p-value",
dest="pvalue",
type=float,
default=0.001,
help="maximum accepted P-value of a change-point; "
"P denotes the probability that the change-point has "
"been found by a random coincidence, rather than a real "
"difference between the data distributions",
)
parser.add_argument(
"-M",
"--magnitude",
dest="magnitude",
type=float,
default=0.0,
help="minimum accepted magnitude of a change-point "
"computed as abs(new_mean / old_mean - 1.0); use it "
"to filter out stupidly small changes like < 0.01",
)
parser.add_argument(
"--window",
default=50,
type=int,
dest="window",
help="the number of data points analyzed at once; "
"the window size affects the discriminative "
"power of the change point detection algorithm; "
"large windows are less susceptible to noise; "
"however, a very large window may cause dismissing short regressions "
"as noise so it is best to keep it short enough to include not more "
"than a few change points (optimally at most 1)",
)
def analysis_options_from_args(args: argparse.Namespace) -> AnalysisOptions:
conf = AnalysisOptions()
if args.pvalue is not None:
conf.max_pvalue = args.pvalue
if args.magnitude is not None:
conf.min_magnitude = args.magnitude
if args.window is not None:
conf.window_len = args.window
return conf
def main():
logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.INFO)
parser = argparse.ArgumentParser(description="Hunts performance regressions in Fallout results")
subparsers = parser.add_subparsers(dest="command")
list_tests_parser = subparsers.add_parser("list-tests", help="list available tests")
list_tests_parser.add_argument("group", help="name of the group of the tests", nargs="*")
list_metrics_parser = subparsers.add_parser(
"list-metrics", help="list available metrics for a test"
)
list_metrics_parser.add_argument("test", help="name of the test")
subparsers.add_parser("list-groups", help="list available groups of tests")
analyze_parser = subparsers.add_parser(
"analyze",
help="analyze performance test results",
formatter_class=argparse.RawTextHelpFormatter,
)
analyze_parser.add_argument("tests", help="name of the test or group of the tests", nargs="+")
analyze_parser.add_argument(
"--update-grafana",
help="Update Grafana dashboards with appropriate annotations of change points",
action="store_true",
)
analyze_parser.add_argument(
"--notify-slack",
help="Send notification containing a summary of change points to given Slack channels",
nargs="+",
)
analyze_parser.add_argument(
"--cph-report-since",
help="Sets a limit on the date range of the Change Point History reported to Slack. Same syntax as --since.",
metavar="DATE",
dest="cph_report_since",
)
setup_data_selector_parser(analyze_parser)
setup_analysis_options_parser(analyze_parser)
regressions_parser = subparsers.add_parser("regressions", help="find performance regressions")
regressions_parser.add_argument(
"tests", help="name of the test or group of the tests", nargs="+"
)
setup_data_selector_parser(regressions_parser)
setup_analysis_options_parser(regressions_parser)
remove_annotations_parser = subparsers.add_parser("remove-annotations")
remove_annotations_parser.add_argument(
"tests", help="name of the test or test group", nargs="*"
)
remove_annotations_parser.add_argument(
"--force", help="don't ask questions, just do it", dest="force", action="store_true"
)
validate_parser = subparsers.add_parser("validate",
help="validates the tests and metrics defined in the configuration")
try:
args = parser.parse_args()
conf = config.load_config()
hunter = Hunter(conf)
if args.command == "list-groups":
hunter.list_test_groups()
if args.command == "list-tests":
group_names = args.group if args.group else None
hunter.list_tests(group_names)
if args.command == "list-metrics":
test = hunter.get_test(args.test)
hunter.list_metrics(test)
if args.command == "analyze":
update_grafana_flag = args.update_grafana
slack_notification_channels = args.notify_slack
slack_cph_since = parse_datetime(args.cph_report_since)
data_selector = data_selector_from_args(args)
options = analysis_options_from_args(args)
tests = hunter.get_tests(*args.tests)
tests_analyzed_series = {test.name: None for test in tests}
for test in tests:
try:
analyzed_series = hunter.analyze(test, selector=data_selector, options=options)
if update_grafana_flag:
if not isinstance(test, GraphiteTestConfig):
raise GrafanaError(f"Not a Graphite test")
hunter.update_grafana_annotations(test, analyzed_series)
if slack_notification_channels:
tests_analyzed_series[test.name] = analyzed_series
except DataImportError as err:
logging.error(err.message)
except GrafanaError as err:
logging.error(
f"Failed to update grafana dashboards for {test.name}: {err.message}"
)
if slack_notification_channels:
hunter.notify_slack(
tests_analyzed_series,
selector=data_selector,
channels=slack_notification_channels,
since=slack_cph_since,
)
if args.command == "regressions":
data_selector = data_selector_from_args(args)
options = analysis_options_from_args(args)
tests = hunter.get_tests(*args.tests)
regressing_test_count = 0
errors = 0
for test in tests:
try:
regressions = hunter.regressions(
test, selector=data_selector, options=options
)
if regressions:
regressing_test_count += 1
except HunterError as err:
logging.error(err.message)
errors += 1
except DataImportError as err:
logging.error(err.message)
errors += 1
if regressing_test_count == 0:
print("No regressions found!")
elif regressing_test_count == 1:
print("Regressions in 1 test found")
else:
print(f"Regressions in {regressing_test_count} tests found")
if errors > 0:
print(f"Some tests were skipped due to import / analyze errors. Consult error log.")
if args.command == "remove-annotations":
if args.tests:
tests = hunter.get_tests(*args.tests)
for test in tests:
hunter.remove_grafana_annotations(test, args.force)
else:
hunter.remove_grafana_annotations(None, args.force)
if args.command == "validate":
hunter.validate()
if args.command is None:
parser.print_usage()
except ConfigError as err:
logging.error(err.message)
exit(1)
except TestConfigError as err:
logging.error(err.message)
exit(1)
except GraphiteError as err:
logging.error(err.message)
exit(1)
except GrafanaError as err:
logging.error(err.message)
exit(1)
except DataImportError as err:
logging.error(err.message)
exit(1)
except HunterError as err:
logging.error(err.message)
exit(1)
except DateFormatError as err:
logging.error(err.message)
exit(1)
except NotificationError as err:
logging.error(err.message)
exit(1)
if __name__ == "__main__":
main()
| 2.078125 | 2 |
docs/python/conf.py | jun-yoon/onnxruntime | 2 | 6745 | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
import os
import sys
import shutil
# Check that these extensions are installed.
import sphinx_gallery.gen_gallery
# The package should be installed in a virtual environment.
import onnxruntime
# The documentation requires two extensions available at:
# https://github.com/xadupre/sphinx-docfx-yaml
# https://github.com/xadupre/sphinx-docfx-markdown
import sphinx_modern_theme
# -- Project information -----------------------------------------------------
project = 'ONNX Runtime'
copyright = '2018, Microsoft'
author = 'Microsoft'
version = onnxruntime.__version__
release = version
# -- General configuration ---------------------------------------------------
extensions = [
'sphinx.ext.intersphinx',
'sphinx.ext.imgmath',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
"sphinx.ext.autodoc",
'sphinx.ext.githubpages',
"sphinx_gallery.gen_gallery",
"docfx_yaml.extension",
"docfx_markdown",
"pyquickhelper.sphinxext.sphinx_runpython_extension",
]
templates_path = ['_templates']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
source_suffix = ['.rst', '.md']
master_doc = 'intro'
language = "en"
exclude_patterns = []
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
html_theme = "sphinx_modern_theme"
html_theme_path = [sphinx_modern_theme.get_html_theme_path()]
html_logo = "../MSFT-Onnx-Runtime-11282019-Logo.png"
html_static_path = ['_static']
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
# -- Options for Sphinx Gallery ----------------------------------------------
sphinx_gallery_conf = {
'examples_dirs': 'examples',
'gallery_dirs': 'auto_examples',
}
# -- markdown options -----------------------------------------------------------
md_image_dest = "media"
md_link_replace = {
'#onnxruntimesessionoptionsenable-profiling)': '#class-onnxruntimesessionoptions)',
}
# -- Setup actions -----------------------------------------------------------
def setup(app):
# Placeholder to initialize the folder before
# generating the documentation.
app.add_stylesheet('_static/gallery.css')
# download examples for the documentation
this = os.path.abspath(os.path.dirname(__file__))
dest = os.path.join(this, "model.onnx")
if not os.path.exists(dest):
import urllib.request
url = 'https://raw.githubusercontent.com/onnx/onnx/master/onnx/backend/test/data/node/test_sigmoid/model.onnx'
urllib.request.urlretrieve(url, dest)
loc = os.path.split(dest)[-1]
    if not os.path.exists(loc):
        shutil.copy(dest, loc)
return app
| 1.601563 | 2 |
traffic_sim/__main__.py | ngngardner/toc_project | 0 | 6746 | """Traffic simulator code."""
import sys
from os import path
from traffic_sim.analysis import TrafficExperiment
from traffic_sim.console import console
if not __package__:
_path = path.realpath(path.abspath(__file__))
sys.path.insert(0, path.dirname(path.dirname(_path)))
def main():
"""Run code from CLI."""
console.log('traffic sim')
num_trials = 30
ex = TrafficExperiment(
experiments=100,
trials=num_trials,
rows=10,
cols=10,
epochs=10,
)
ex.run()
ex.analyze()
if __name__ == '__main__':
main()
| 2.21875 | 2 |
TSFpy/debug/sample_fibonacci.py | ooblog/TSF1KEV | 0 | 6747 | #! /usr/bin/env python
# -*- coding: UTF-8 -*-
from __future__ import division,print_function,absolute_import,unicode_literals
import sys
import os
os.chdir(sys.path[0])
sys.path.append('/mnt/sda2/github/TSF1KEV/TSFpy')
from TSF_io import *
#from TSF_Forth import *
from TSF_shuffle import *
from TSF_match import *
from TSF_calc import *
from TSF_time import *
TSF_Forth_init(TSF_io_argvs(),[TSF_shuffle_Initwords,TSF_match_Initwords,TSF_calc_Initwords,TSF_time_Initwords])
TSF_Forth_setTSF("TSF_Tab-Separated-Forth:",
"\t".join(["UTF-8","#TSF_encoding","200","#TSF_calcPR","N-Fibonacci:","#TSF_this","0","#TSF_fin."]),
TSF_style="T")
TSF_Forth_setTSF("N-Fibonacci:",
"\t".join(["TSF_argvs:","#TSF_cloneargvs","TSF_argvs:","#TSF_lenthe","[0]Z[Fibcount:0]~[TSF_argvs:0]","#TSF_calcDC","Fibcount:","0","#TSF_pokethe","Fibonacci:","#TSF_this"]),
TSF_style="T")
TSF_Forth_setTSF("Fibonacci:",
"\t".join(["[Fibcount:1]Z1~[Fibcount:1]","#TSF_calcDC","((2&(([0]+3)*[0]+2)^)/((2&(2*[0]+2)^)-(2&([0]+1)^)-1)\\1)#(2&([0]+1)^)","#TSF_calcDC","1","#TSF_echoN","[Fibcount:1]+1","#TSF_calcDC","Fibcount:","1","#TSF_pokethe","Fibjump:","[Fibcount:0]-([Fibcount:1]+1)o0~1","#TSF_calcDC","#TSF_peekthe","#TSF_this"]),
TSF_style="T")
TSF_Forth_setTSF("Fibcount:",
"\t".join(["20","-1"]),
TSF_style="T")
TSF_Forth_setTSF("Fibjump:",
"\t".join(["Fibonacci:","#exit"]),
TSF_style="T")
TSF_Forth_addfin(TSF_io_argvs())
TSF_Forth_argvsleftcut(TSF_io_argvs(),1)
TSF_Forth_run()
| 1.96875 | 2 |
Tomboy2Evernote.py | rguptan/Tomboy2Evernote | 0 | 6748 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
import re
import sys, getopt
import glob
import os
def process_files(inputdir, outputdir):
os.chdir(inputdir)
enex_notes = []
output_filename = 'Tomboy2Evernote.enex'
i = 0
for file in glob.glob("*.note"):
note_file_path = inputdir + '/' + file
note_body = open(note_file_path, 'r').read()
title = get_title(note_body)
html_note_body = get_html_body(note_body)
created_date = tomboy_to_enex_date(get_created_date(note_body))
updated_date = tomboy_to_enex_date(get_updated_date(note_body))
enex_notes.append(make_enex(title, html_note_body, created_date, updated_date))
i += 1
multi_enex_body = make_multi_enex(enex_notes)
save_to_file(outputdir, output_filename, multi_enex_body)
print "Exported notes count: " + `i`
print "Evernote file location: " + outputdir + "/" + output_filename
def get_title(note_body):
title_regex = re.compile("<title>(.+?)</title>")
    matches = title_regex.search(note_body)
if matches:
return matches.group(1)
else:
return "No Title"
def get_created_date(note_body):
created_date_regex = re.compile("<create-date>(.+?)</create-date>")
    matches = created_date_regex.search(note_body)
if matches:
return matches.group(1)
else:
return "No Created Date"
def get_updated_date(note_body):
updated_date_regex = re.compile("<last-change-date>(.+?)</last-change-date>")
    matches = updated_date_regex.search(note_body)
if matches:
return matches.group(1)
else:
return "No Updated Date"
def tomboy_to_enex_date(tomboy_date):
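    # e.g. "2015-04-12T15:34:31.123456+02:00" -> "20150412T153431Z"
    # note: the fractional seconds and timezone offset are dropped, not converted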
return re.sub(r"^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2}).*", r"\1\2\3T\4\5\6Z",
tomboy_date)
def get_html_body(note_body):
new_line = '¬BR¬'
xml_tag = r"<(\/?)[a-zA-Z0-9_\-:]+>"
start_xml_tag = r"<[a-zA-Z0-9_\-:]+>"
# make note body a one liner
note_body = note_body.replace('\n', new_line)
# get content
note_body = re.sub(r".*<note-content.+?>(.+?)</note-content>.*", r"\1", note_body)
# strip title until new_line or start_xml_tag
note_body = re.sub(r"^(.+?)(" + start_xml_tag + "|" + new_line + ")", r"\2", note_body)
# strip first two new lines, even if prefixed with an xml tag
tag = re.match("^" + start_xml_tag, note_body)
if tag != None:
note_body = re.sub(r"^" + start_xml_tag, r"", note_body)
note_body = re.sub(r"^(" + new_line + "){1,2}", r"", note_body)
if tag != None:
note_body = tag.group(0) + note_body
# links
note_body = re.sub(r"<link:internal>(.+?)</link:internal>", r"\1", note_body)
note_body = re.sub(r"<link:broken>(.+?)</link:broken>", r"\1", note_body)
p = re.compile(r"(<link:url>(.+?)</link:url>)")
for m in p.finditer(note_body):
if re.search(r"^([a-zA-Z0-9\._%+\-]+@(?:[a-zA-Z0-9\-]+\.)+[a-zA-Z]{2,10}|https?://.+)$", m.group(2)):
note_body = note_body.replace(m.group(1), '<a href="' + m.group(2) + '">' + m.group(2) + "</a>")
else:
note_body = note_body.replace(m.group(1), m.group(2))
# lists
note_body = re.sub(r"<(\/?)list>", r"<\1ul>", note_body)
note_body = re.sub(r'<list-item dir="ltr">', r"<li>", note_body)
note_body = re.sub(r"<(\/?)list-item>", r"<\1li>", note_body)
# higlight
note_body = re.sub(r"<highlight>(.+?)</highlight>", r'<span style="background:yellow">\1</span>', note_body)
# font size
note_body = re.sub(r"<size:small>(.+?)</size:small>", r'<span style="font-size:small">\1</span>', note_body)
note_body = re.sub(r"<size:large>(.+?)</size:large>", r'<span style="font-size:large">\1</span>', note_body)
note_body = re.sub(r"<size:huge>(.+?)</size:huge>", r'<span style="font-size:xx-large">\1</span>', note_body)
# text style
note_body = re.sub(r"<(\/?)monospace>", r"<\1code>", note_body)
note_body = re.sub(r"<(\/?)bold>", r"<\1b>", note_body)
note_body = re.sub(r"<(\/?)italic>", r"<\1i>", note_body)
note_body = re.sub(r"<(\/?)strikethrough>", r"<\1strike>", note_body)
    # indentation
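    # tabs are expanded first; the loop below then rewrites the leading spaces after
    # each line break one character at a time so indentation survives the HTML export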
note_body = re.sub(r"\t", r" ", note_body)
while re.search(new_line + " ", note_body) != None:
note_body = re.sub("(" + new_line + " *) ", r"\1 ", note_body)
# set new lines
note_body = note_body.replace(new_line, '<br/>\n')
return note_body
def make_enex(title, body, created_date, updated_date):
return '''<note><title>''' + title + '''</title><content><![CDATA[<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE en-note SYSTEM "http://xml.evernote.com/pub/enml2.dtd">
<en-note style="word-wrap: break-word; -webkit-nbsp-mode: space; -webkit-line-break: after-white-space;">
''' + body + '''
</en-note>]]></content><created>''' + created_date + '''</created><updated>''' + updated_date + '''</updated></note>'''
def make_multi_enex(multi_enex_body):
return '''<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE en-export SYSTEM "http://xml.evernote.com/pub/evernote-export2.dtd">
<en-export export-date="20150412T153431Z" application="Evernote/Windows" version="5.x">
''' + ''.join(multi_enex_body) + '''</en-export>'''
def save_to_file(outputdir, filename, body):
if not os.path.exists(outputdir):
os.makedirs(outputdir)
text_file = open(outputdir + '/' + filename, "w")
text_file.write(body)
text_file.close()
def get_help_line():
print 'Usage: ', sys.argv[0], ' -i <inputdir> -o <outputdir>'
def get_input_params(argv):
inputdir = ''
outputdir = ''
printhelpline = 0
try:
opts, args = getopt.getopt(argv, "hi:o:", ["idir=", "odir="])
except getopt.GetoptError:
exit_with_error()
for opt, arg in opts:
if opt == '-h':
get_help_line()
sys.exit()
elif opt in ("-i", "--idir"):
inputdir = arg
elif opt in ("-o", "--odir"):
outputdir = arg
if (inputdir == ""):
print "Error: Missing input folder"
printhelpline = 1
if (outputdir == ""):
print "Error: Missing output folder"
printhelpline = 1
if printhelpline == 1:
exit_with_error()
return (inputdir, outputdir)
def exit_with_error():
get_help_line()
sys.exit(2)
def main(argv):
inputdir, outputdir = get_input_params(argv)
process_files(inputdir, outputdir)
if __name__ == "__main__":
main(sys.argv[1:])
| 2.703125 | 3 |
demo.py | williamfzc/pyat | 20 | 6749 | from pyatool import PYAToolkit
# Custom functions must take a toolkit parameter, even if it is not used
def test_b(toolkit):
return 'i am test_b, running on {}'.format(toolkit.device_id)
# Wrap an adb command as a method
PYAToolkit.bind_cmd(func_name='test_a', command='shell pm list package | grep google')
# Or bind a custom function
PYAToolkit.bind_func(real_func=test_b)
# Whether logging is needed
PYAToolkit.switch_logger(True)
# Initialization
d = PYAToolkit('123456F')
assert d.is_connected()
# Remote control is also supported (not yet stable, not recommended for now)
# d = PYAToolkit('123456F', mode='remote')
# Methods that are already bound can be called directly
result = d.test_a()
# Possible output:
# package:com.google.android.webview
# Custom functions work the same way
result = d.test_b()
# i am test_b, running on 123456F
# You can also call them via `std` or `standard_func` (code completion works, which is convenient)
# Standard library only; extensions of your own can only be called directly
d.std.get_current_activity(toolkit=d)
# Get all registered functions
all_functions = d.current_function()
print(all_functions)
# All the standard functions are demonstrated below; feedback is welcome, or change them yourself
# Print the device id, for testing only
d.hello_world()
# Show all installed packages
installed_package = d.show_package()
# Name of the top activity on the stack
current_activity_name = d.get_current_activity()
# Install the given apk (both url and path are supported); this example may be slow since it downloads from github, feel free to change it
d.install_from(url=r'https://github.com/williamfzc/simhand2/releases/download/v0.1.2/app-debug.apk')
# d.install_from(path=r'/Users/admin/some_path/some_apk.apk')
# Check whether a package is installed
target_package_name = 'com.github.williamfzc.simhand2'
is_installed = d.is_installed(package_name=target_package_name)
# Clean the cache
d.clean_cache(target_package_name)
if is_installed:
d.uninstall(target_package_name)
# Get the phone's ip address
local_address = d.get_ip_address()
print(local_address)
# Toggle wifi state
d.switch_wifi(False)
# Toggle airplane mode
d.switch_airplane(True)
d.switch_airplane(False)
d.switch_wifi(True)
# Switch the input method
d.set_ime('com.sohu.inputmethod.sogouoem/.SogouIME')
# push and pull
d.push('./README.md', '/sdcard/')
d.pull('/sdcard/README.md', './haha.md')
# send keyevent
d.input_key_event(26)
d.input_key_event(26)
# swipe
d.swipe(500, 1200, 500, 200)
# click
d.click(200, 200)
| 2.234375 | 2 |
nnlab/nn/graph.py | nlab-mpg/nnlab | 0 | 6750 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
from six.moves import xrange, zip
import tensorflow as tf
from .tensor import Tensor
class Graph(object):
"""The class for defining computational graph."""
def __init__(self, loss=None, modules=None, inputs=None, outputs=None, monitors=None):
self._loss = loss
self._modules = modules if modules is not None else []
self._inputs = inputs
self._outputs = outputs
self._monitors = monitors
self._check_arguments(loss, modules, inputs, outputs, monitors)
def _check_arguments(self, loss, modules, inputs, outputs, monitors):
"""Verify the arguments."""
if loss is not None and not isinstance(loss, Tensor):
raise Exception("loss must be a tensor")
if modules is not None and not isinstance(modules, list):
raise Exception("modules must be a list")
if inputs is not None and not self._check_type(inputs):
raise Exception("input must be a tensor/list/dict")
if outputs is not None and not self._check_type(outputs):
raise Exception("output must be a tensor/list/dict")
if monitors is not None and not isinstance(monitors, dict):
raise Exception("monitors must be a dict")
def _check_type(self, obj):
"""Check whether the type is either a tensor or list or dict"""
return isinstance(obj, Tensor) or isinstance(obj, list) or isinstance(obj, dict)
@property
def loss(self):
return self._loss
@property
def modules(self):
return self._modules
@property
def inputs(self):
return self._inputs
| 2.84375 | 3 |
local-rotations.py | katiekruzan/masters-thesis | 0 | 6751 | """
Here we're going to code for the local rotations. We're doing an object oriented approach
Left and right are in reference to the origin
"""
__version__ = 1.0
__author__ = '<NAME>'
import string # just to get the alphabet easily iterable
import sys # This just helps us in our printing
from typing import Dict # This helps us in our documentation
# Getting the structure for the classes we're putting together
class Segment:
"""
These are going to represent the outer segments and the mysteries they hold.
The segments will be adjacent to 2 outer nodes
"""
def __init__(self, name: str):
"""
Initialize the segment, keeping a place for the right left outer vertices to which it is adjacent
:param name: How we will reference this segment. In this implementation, it is expected to be a negative integer
"""
self.leftOuter = None
self.rightOuter = None
self.name = name
def getName(self) -> str:
"""
Return the name we gave to this segment.
:return: name
"""
return self.name
def getLeftOuter(self):
"""
Return the outer node to the left of this segment with respect to the origin
:return: leftOuter
"""
return self.leftOuter
def getRightOuter(self):
"""
Return the outer node to the right of this segment with respect to the origin
:return: rightOuter
"""
return self.rightOuter
def setLeftOuter(self, left):
"""
Set the outer node to the left of this segment with respect to the origin
Also, set left's right segment to this segment.
:param left: A outer node object to be referenced as this segment's left outer node
:return: None
"""
self.leftOuter = left
if left.getRightSegment() is None:
left.setRightSegment(self)
def setRightOuter(self, right):
"""
Set the outer node to the right of this segment with respect to the origin
Also, set right's left segment to this segment.
:param right: A outer node object to be referenced as this segment's right outer node
:return: None
"""
self.rightOuter = right
if right.getLeftSegment() is None:
right.setLeftSegment(self)
def isValidObject(self) -> bool:
"""
        Checks to see if this segment has been fully initialized.
:return: valid returns true if it has both the left and right outer nodes set
"""
if (self.leftOuter is None) or (self.rightOuter is None):
return False
return True
def toString(self) -> str:
"""
Returns a formatted string of the left and right outer nodes this is associated with
:return: Description string
"""
return 'left Outer: ' + self.leftOuter.getName() + '\nright Outer: ' + self.rightOuter.getName()
class Outer:
"""
Class to represent the outer vertices that are adjacent to an inner vertex and 2 outer segments
"""
def __init__(self, name: str):
"""
Initialize the outer node
Keeping a place for the inner vertex and right and left outer segments to which it is adjacent.
:param name: How we will reference this outer node. In this implementation, it is expected to be a positive integer
"""
self.adjInner = None
self.leftSegment = None
self.rightSegment = None
self.name = name
def getName(self) -> str:
"""
Return the name we gave to this outer node.
:return: name
"""
return self.name
def getLeftSegment(self) -> Segment:
"""
Return the segment object to the left of this outer node with respect to the origin
:return: leftSegment
"""
return self.leftSegment
def getRightSegment(self) -> Segment:
"""
Return the segment object to the right of this outer node with respect to the origin
:return: rightSegment
"""
return self.rightSegment
def getAdjInner(self):
"""
Return the inner node object adjacent to this outer note object
:return: adjInner
"""
return self.adjInner
def setLeftSegment(self, left: Segment):
"""
Set the segment to the left of this outer node with respect to the origin
Also, set left's right outer node to self.
:param left: A segment object to be referenced as this node's left outer segment
:return: None
"""
self.leftSegment = left
if left.getRightOuter() is None:
left.setRightOuter(self)
def setRightSegment(self, right: Segment):
"""
Set the segment to the right of this outer node with respect to the origin
Also, set right's left outer node to self.
:param right: A segment object to be referenced as this node's right outer segment
:return: None
"""
self.rightSegment = right
if right.getLeftOuter() is None:
right.setLeftOuter(self)
def setAdjInner(self, inner):
"""
Set the inner node adjacent to this outer node
Also, set inner's adjacent outer node to self.
:param inner: A inner node object to be referenced as this node's adjacent inner node
:return: None
"""
self.adjInner = inner
if inner.getAdjOuter() is None:
inner.setAdjOuter(self)
def isValidObject(self) -> bool:
"""
        Checks to see if this outer node has been fully initialized.
:return: valid returns true if it has the left segment, right segment, and inner node set
"""
if (self.leftSegment is None) or (self.rightSegment is None) or (self.adjInner is None):
return False
return True
def toString(self) -> str:
"""
Returns a formatted string of the left segment, right segment, and inner node this outer node is associated with
:return: Description string
"""
return 'left Segment: ' + self.leftSegment.getName() + '\nright Segment: ' + self.rightSegment.getName() \
+ '\nadj Inner: ' + self.adjInner.getName()
class Inner:
"""
Class to represent the inner vertices that are adjacent to an outer vertex and 2 neighboring inner vertices
"""
def __init__(self, name: str):
"""
Initialize the inner node object
Keeping a place for the outer vertex and right and left adjacent inner nodes.
:param name: How we will reference this inner node. In this implementation, it is expected to be a lowercase letter
"""
self.adjOuter = None
self.leftInner = None
self.rightInner = None
self.name = name
def getName(self) -> str:
"""
Return the name we gave to this inner node.
:return: name
"""
return self.name
def getLeftInner(self):
"""
Return the inner node object to the left of this inner node with respect to the origin
:return: leftInner
"""
return self.leftInner
def getRightInner(self):
"""
Return the inner node object to the right of this inner node with respect to the origin
:return: rightInner
"""
return self.rightInner
def getAdjOuter(self) -> Outer:
"""
Return the outer node object adjacent to this inner node
:return: adjOuter
"""
return self.adjOuter
def setLeftInner(self, left):
"""
Set the inner node to the left of this inner node with respect to the origin
Also, set left's right inner node to self.
:param left: An inner node object to be referenced as this node's left inner node
:return: None
"""
self.leftInner = left
if left.getRightInner() is None:
left.setRightInner(self)
def setRightInner(self, right):
"""
Set the inner node to the right of this inner node with respect to the origin
Also, set right's left inner node to self.
:param right: An inner node object to be referenced as this node's right inner node
:return: None
"""
self.rightInner = right
if right.getLeftInner() is None:
right.setLeftInner(self)
def setAdjOuter(self, outer: Outer):
"""
Set the outer node adjacent to this inner node
Also, set outer's adjacent inner node to self.
:param outer: An outer node object to be referenced as this node's adjacent outer node
:return: None
"""
self.adjOuter = outer
if outer.getAdjInner() is None:
outer.setAdjInner(self)
def isValidObject(self) -> bool:
"""
        Checks to see if this inner node has been fully initialized.
:return: valid returns true if it has the left inner node, right inner node, and adjacent outer node set
"""
if (self.leftInner is None) or (self.rightInner is None) or (self.adjOuter is None):
return False
return True
def toString(self) -> str:
"""
Returns a formatted string of the left inner node, right inner node, and adjacent outer node this inner node
is associated with
:return: Description string
"""
return 'left Inner: ' + self.leftInner.getName() + '\nright Inner: ' + self.rightInner.getName() \
+ '\nadj Outer: ' + self.adjOuter.getName()
def standardCircle(num_verts: int) -> (Dict[str, Segment], Dict[str, Outer], Dict[str, Inner]):
"""
This will go through and initialize our standard starting circle
:param num_verts: the number of outer nodes we will have
:returns: tuple(segs, outs, inns)
-segs - dictionary of str: Segment objects in the circle \\
-outs - dictionary of str: Outer objects in the circle \\
-inns - dictionary of str: Inner objects in the circle
"""
# Initializing our dictionaries
segs = dict()
outs = dict()
inns = dict()
# Running through the number of vertices we will be edning up with
for i in range(num_verts):
# start with an inner node - labeling with lowercase letters
inn = Inner(string.ascii_letters[i])
# If we aren't on the first one, connect it to the previous one.
if i != 0:
inn.setLeftInner(inns[string.ascii_letters[i - 1]])
# If we've hit the end of the line, go ahead and close up the circle.
if i == num_verts - 1:
inn.setRightInner(inns[string.ascii_letters[0]])
# then make the outer
out = Outer(str(i + 1))
# Go ahead and connect the inner we just made with this outer node
out.setAdjInner(inn)
# If we aren't on the first one, go ahead and connect it to the previous segment
if i != 0:
out.setLeftSegment(segs[str(-i)])
# Now time to make the segment
seg = Segment(str(-i - 1))
# Go ahead and connect the outer node we just made with this segment
seg.setLeftOuter(out)
# If we're at the end of the circle, then we close it up. Otherwise, move on
if i == num_verts - 1:
seg.setRightOuter(outs[str(1)])
# add them to our dictionaries
segs[seg.getName()] = seg
outs[out.getName()] = out
inns[inn.getName()] = inn
# If we've made it here, then we've made the full circle and are ready to return it
return segs, outs, inns
def findTheFace(source_in: Inner) -> list:
"""
This will take an inner node and use the algorithm to walk the face that it is on.
The order of the face will be i, o, s, o, i repeat
:param source_in: Inner node object we are starting from.
:return: face: a list representing the face. This list is of inner, outer, and segment objects in the
order i, o, s, o, i, repeat.
"""
# initialize the list
face = list()
# starting the face with the source inner node.
face.append(source_in)
# initialize the ending inner node we will be using for comparison
end_in = None
# As long as we haven't looped back around, go through the following process.
while source_in != end_in:
# inner: find adjacent outer
face.append(face[-1].getAdjOuter())
# outer: go to right seg
face.append(face[-1].getRightSegment())
# segment: go to right outer
face.append(face[-1].getRightOuter())
# outer: then adj inner
face.append(face[-1].getAdjInner())
# then left inner and repeat.
# set this inner node as our node to compare to our starting node.
end_in = face[-1].getLeftInner()
face.append(end_in)
return face
def faceCannonOrder(face: list) -> list:
"""
    Put the face elements into a canonical order.
    We anchor on the lowest-numbered outer element and start the list just before it, so the ordering is consistent between runs.
:param face: a list representing the face. This list is of inner, outer, and segment objects in the
order i, o, s, o, i, repeat.
:return: ordered face in canonical order
"""
    # find the lowest-numbered element, then start right before it
# initialize face num as a relatively high number we won't encounter
facenum = 333
# initialize the int for where we will split the list
start_ind = 0
# loop through and find the face we want to find
for i in range(len(face)):
try:
if int(face[i].getName()) < facenum:
# To get here, we must have found a lower face
# keep track of where this is located in the list
start_ind = i - 1
# make our current lowest face the new lowest face to keep comparing to.
facenum = int(face[i].getName())
# if we try casting a letter to a number, python will get upset, but that also means we're looking at
# an inner node, which we don't want for this anyways.
except ValueError:
continue
# make our ordered face getting from the starting index to the end, then wrapping around and getting the rest of
# the face
ord_face = face[start_ind:] + face[:start_ind]
# go through and make sure we don't have any duplicate elements right by each other. If we do, then drop them.
for i in range(len(ord_face) - 1):
if ord_face[i].toString() == ord_face[i + 1].toString():
ord_face.pop(i)
break
# return the ordered face
return ord_face
def grabAllTheFaces(inns: Dict[str, Inner]) -> list:
"""
Function to get the list of unique faces for our circle.
:param inns: dictionary of Inner objects. We will loop through these to get the faces
:return: faces: List of distinct faces in canonical order.
"""
# initialize the list of faces
faces = list()
# a set of all the elements we have covered by the faces. Will use this for a completeness check
covered = set()
# run through every inner node we've been given
for inn in inns:
# Generate the face that inner node lies on
face = findTheFace(inns[inn])
# put the face we've gotten in canonical order
face = faceCannonOrder(face)
# Check if we've already captured it.
if face not in faces:
# If not, then add it to our list of faces
faces.append(face)
# Go ahead and add the elements in this face to our covered set
covered.update(face)
# check we've gotten all the elements
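    # each of the n inner nodes pairs with exactly one outer node and one segment,
    # so full coverage means the covered set holds 3n elements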
if len(covered) == (3 * len(inns)):
print('We got em!!!')
# Now return a list of all the faces we have.
return faces
def printCircleStatus(segs: Dict[str, Segment], outs: Dict[str, Outer], inns: Dict[str, Inner]):
"""
Helper function that prints the status of the circle to the console
:param segs: dictionary of str: Segment objects in the circle
:param outs: dictionary of str: Outer objects in the circle
:param inns: dictionary of str: Inner objects in the circle
:return: None
"""
# Run through the segments
print('\nSegments:')
for k in segs:
print()
print(k)
print(segs[k].toString())
# Run through the Outer nodes
print('\nOuters:')
for k in outs:
print()
print(k)
print(outs[k].toString())
# Run through the Inner nodes
print('\nInners:')
for k in inns:
print()
print(k)
print(inns[k].toString())
if __name__ == '__main__':
# This is where you change the variables.
# must be a positive integer > 2
verts = 12
# Must be a string with spaces between each element. If you want to denote multiple cycles, you must add a |
switch_txt = '2 3 4 5 | 12 7'
# we're going to make a list of all the switches and all the cycles
switches = list()
# first, we get the cycles, split by '|'
cycles = switch_txt.split('|')
for c in cycles:
# We're going to split the switch into a list split by the whitespace
s = c.strip().split()
# Then we're going to append the switches in the cycle to the new list
switches.append(s)
# Go ahead and make the standard circle given the number of vertices we want to use.
segments, outers, inners = standardCircle(verts)
# Go through and grab the faces for our standard circle
facs = grabAllTheFaces(inners)
print('\nPrinting the faces')
for f in facs:
print()
for p in f:
sys.stdout.write(p.getName() + ' ')
# Go through and do the switches for each cycle
for switch in switches:
for num in range(len(switch)):
# store the current part of the switch we're working on
cs = switch[num]
# store the next part of the switch we're working on, looping to the beginning if we're at the end
ns = switch[(num + 1) % len(switch)]
# Do the actual switch
# Getting the new inner and outer validly switched up
inners[string.ascii_letters[int(cs) - 1]].setAdjOuter(outers[ns])
outers[ns].setAdjInner(inners[string.ascii_letters[int(cs) - 1]])
# print how the final rotation sits
printCircleStatus(segments, outers, inners)
# Go through and generate and print the new faces
new_facs = grabAllTheFaces(inners)
print('\nPrinting the new faces')
for f in new_facs:
print()
for p in f:
sys.stdout.write(p.getName() + ' ')
| 4.15625 | 4 |
PT-FROST/frost.py | EtienneDavid/FROST | 2 | 6752 | import random
import argparse
import numpy as np
import pandas as pd
import os
import time
import string
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
from tqdm import tqdm
from model import WideResnet
from cifar import get_train_loader, get_val_loader
from label_guessor import LabelGuessor
from lr_scheduler import WarmupCosineLrScheduler
from ema import EMA
import utils
## args
parser = argparse.ArgumentParser(description=' FixMatch Training')
parser.add_argument('--wresnet-k', default=2, type=int, help='width factor of wide resnet')
parser.add_argument('--wresnet-n', default=28, type=int, help='depth of wide resnet')
parser.add_argument('--n-classes', type=int, default=10, help='number of classes in dataset')
parser.add_argument('--n-labeled', type=int, default=10, help='number of labeled samples for training')
parser.add_argument('--n-epochs', type=int, default=256, help='number of training epochs')
parser.add_argument('--batchsize', type=int, default=64, help='train batch size of labeled samples')
parser.add_argument('--mu', type=int, default=7, help='factor of train batch size of unlabeled samples')
parser.add_argument('--mu-c', type=int, default=1, help='factor of train batch size of contrastive learning samples')
parser.add_argument('--thr', type=float, default=0.95, help='pseudo label threshold')
parser.add_argument('--n-imgs-per-epoch', type=int, default=50000, help='number of training images for each epoch')
parser.add_argument('--lam-x', type=float, default=1., help='coefficient of labeled loss')
parser.add_argument('--lam-u', type=float, default=1., help='coefficient of unlabeled loss')
parser.add_argument('--lam-clr', type=float, default=1., help='coefficient of contrastive loss')
parser.add_argument('--ema-alpha', type=float, default=0.999, help='decay rate for ema module')
parser.add_argument('--lr', type=float, default=0.03, help='learning rate for training')
parser.add_argument('--weight-decay', type=float, default=5e-4, help='weight decay')
parser.add_argument('--momentum', type=float, default=0.9, help='momentum for optimizer')
parser.add_argument('--seed', type=int, default=-1, help='seed for random behaviors, no seed if negative')
parser.add_argument('--feature_dim', default=128, type=int, help='Feature dim for latent vector')
parser.add_argument('--temperature', default=0.5, type=float, help='Temperature used in softmax')
parser.add_argument('--k', default=200, type=int, help='Top k most similar images used to predict the label')
parser.add_argument('--test', default=0, type=int, help='0 is softmax test function, 1 is similarity test function')
parser.add_argument('--bootstrap', type=int, default=16, help='Bootstrapping factor (default=16)')
parser.add_argument('--boot-schedule', type=int, default=1, help='Bootstrapping schedule (default=1)')
parser.add_argument('--balance', type=int, default=0, help='Balance class methods to use (default=0 None)')
parser.add_argument('--delT', type=float, default=0.2, help='Class balance threshold delta (default=0.2)')
args = parser.parse_args()
print(args)
# save results
save_name_pre = '{}_E{}_B{}_LX{}_LU{}_LCLR{}_THR{}_LR{}_WD{}'.format(args.n_labeled, args.n_epochs, args.batchsize,
args.lam_x, args.lam_u, args.lam_clr, args.thr, args.lr, args.weight_decay)
ticks = time.time()
result_dir = 'results/' + save_name_pre + '.' + str(ticks)
if not os.path.exists(result_dir):
os.mkdir(result_dir)
def set_model():
model = WideResnet(args.n_classes, k=args.wresnet_k, n=args.wresnet_n, feature_dim=args.feature_dim) # wresnet-28-2
model.train()
model.cuda()
criteria_x = nn.CrossEntropyLoss().cuda()
criteria_u = nn.CrossEntropyLoss().cuda()
return model, criteria_x, criteria_u
def train_one_epoch(
model,
criteria_x,
criteria_u,
optim,
lr_schdlr,
ema,
dltrain_x,
dltrain_u,
dltrain_all,
lb_guessor,
):
loss_avg, loss_x_avg, loss_u_avg, loss_clr_avg = [], [], [], []
epsilon = 0.000001
dl_u, dl_all = iter(dltrain_u), iter(dltrain_all)
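    # three training modes below, selected by the loss coefficients:
    #   lam_u > 0 and lam_clr > 0 -> pseudo-labeling + contrastive branch
    #   lam_u > 0, lam_clr == 0   -> pseudo-labeling (FixMatch-style) only
    #   lam_u == 0, lam_clr > 0   -> contrastive learning only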
for _, _, ims_all_1, ims_all_2, _ in tqdm(dl_all, desc='Training ...'):
ims_u_weak, ims_u_strong, _, _, lbs_u = next(dl_u)
loss_x, loss_u, loss_clr = torch.tensor(0).cuda(), torch.tensor(0).cuda(), torch.tensor(0).cuda()
fv_1, fv_2 = torch.tensor(0).cuda(), torch.tensor(0).cuda()
ims_u_weak = ims_u_weak.cuda()
ims_u_strong = ims_u_strong.cuda()
ims_all_1 = ims_all_1.cuda(non_blocking=True)
ims_all_2 = ims_all_2.cuda(non_blocking=True)
dl_x = iter(dltrain_x)
ims_x_weak, _, _, _, lbs_x = next(dl_x)
ims_x_weak = ims_x_weak.cuda()
lbs_x = lbs_x.cuda()
n_x, n_u, n_all = 0, 0, 0
        if args.lam_u >= epsilon and args.lam_clr >= epsilon:  # pseudo-labeling and contrastive learning
lbs_u, valid_u, mask_u = lb_guessor(model, ims_u_weak, args.balance, args.delT)
ims_u_strong = ims_u_strong[valid_u]
n_x, n_u, n_all = ims_x_weak.size(0), ims_u_strong.size(0), ims_all_1.size(0)
if n_u != 0:
ims_x_u_all_1 = torch.cat([ims_x_weak, ims_u_strong, ims_all_1], dim=0).detach()
ims_x_u_all_2 = torch.cat([ims_x_weak, ims_u_strong, ims_all_2], dim=0).detach()
logits_x_u_all_1, fv_1, z_1 = model(ims_x_u_all_1)
logits_x_u_all_2, fv_2, z_2 = model(ims_x_u_all_2)
logits_x_u_all = (logits_x_u_all_1 + logits_x_u_all_2) / 2
logits_x, logits_u = logits_x_u_all[:n_x], logits_x_u_all[n_x:(n_x + n_u)]
loss_x = criteria_x(logits_x, lbs_x)
if args.balance == 2 or args.balance == 3:
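                    # mask_u holds per-sample weights produced by the label guessor
                    # when class balancing is enabled; the unlabeled cross-entropy is
                    # averaged under these weights instead of uniformly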
loss_u = (F.cross_entropy(logits_u, lbs_u, reduction='none') * mask_u).mean()
else:
loss_u = criteria_u(logits_u, lbs_u)
else: # n_u == 0
ims_x_all_1 = torch.cat([ims_x_weak, ims_all_1], dim=0).detach()
ims_x_all_2 = torch.cat([ims_x_weak, ims_all_2], dim=0).detach()
logits_x_all_1, fv_1, z_1 = model(ims_x_all_1)
logits_x_all_2, fv_2, z_2 = model(ims_x_all_2)
logits_x_all = (logits_x_all_1 + logits_x_all_2) / 2
logits_x = logits_x_all[:n_x]
loss_x = criteria_x(logits_x, lbs_x)
loss_u = torch.tensor(0)
elif args.lam_u >= epsilon: #lam_clr == 0: pseudo-labeling only
lbs_u, valid_u, mask_u = lb_guessor(model, ims_u_weak, args.balance, args.delT)
ims_u_strong = ims_u_strong[valid_u]
n_x, n_u = ims_x_weak.size(0), ims_u_strong.size(0)
if n_u != 0:
ims_x_u = torch.cat([ims_x_weak, ims_u_strong], dim=0).detach()
logits_x_u, _, _ = model(ims_x_u)
logits_x, logits_u = logits_x_u[:n_x], logits_x_u[n_x:]
loss_x = criteria_x(logits_x, lbs_x)
if args.balance == 2 or args.balance == 3:
loss_u = (F.cross_entropy(logits_u, lbs_u, reduction='none') * mask_u).mean()
else:
loss_u = criteria_u(logits_u, lbs_u)
else: # n_u == 0
logits_x, _, _ = model(ims_x_weak)
loss_x = criteria_x(logits_x, lbs_x)
loss_u = torch.tensor(0)
else: #lam_u == 0: contrastive learning only
n_x, n_all = ims_x_weak.size(0), ims_all_1.size(0)
ims_x_all_1 = torch.cat([ims_x_weak, ims_all_1], dim=0).detach()
ims_x_all_2 = torch.cat([ims_x_weak, ims_all_2], dim=0).detach()
logits_x_all_1, fv_1, z_1 = model(ims_x_all_1)
logits_x_all_2, fv_2, z_2 = model(ims_x_all_2)
logits_x_all = (logits_x_all_1 + logits_x_all_2) / 2
logits_x = logits_x_all[:n_x]
loss_x = criteria_x(logits_x, lbs_x)
            loss_u = torch.tensor(0.)
if args.lam_clr >= epsilon:
#compute l_clr
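            # drop the labeled and pseudo-labeled slices of the batch, keeping
            # only the projections of the two augmented "all" views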
fv_1 = fv_1[(n_x + n_u):]
fv_2 = fv_2[(n_x + n_u):]
z_1 = z_1[(n_x + n_u):]
z_2 = z_2[(n_x + n_u):]
#[2*muc*B, D]
z = torch.cat([z_1, z_2], dim=0)
#[2*muc*B, 2*muc*B]
sim_matrix = torch.exp(torch.mm(z, z.t().contiguous()) / args.temperature) #denominator
#[2*muc*B, 2*muc*B]
            # exclude self-similarity on the diagonal; `> 0` is used instead of
            # .bool() to stay compatible with older PyTorch versions
            mask = (torch.ones_like(sim_matrix) - torch.eye(2 * args.mu_c * args.batchsize, device=sim_matrix.device))
            mask = mask > 0
#[2*muc*B, 2*muc*B - 1]
sim_matrix = sim_matrix.masked_select(mask).view(2 * args.mu_c * args.batchsize, -1)
#[muc*B]
pos_sim = torch.exp(torch.sum(z_1 * z_2, dim=-1) / args.temperature) #numerator
#[2*muc*B]
pos_sim = torch.cat([pos_sim, pos_sim], dim=0)
loss_clr = (- torch.log(pos_sim / sim_matrix.sum(dim=-1))).mean()
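            # NT-Xent: for each embedding z_i the positive is its other augmented
            # view z_j, and loss_i = -log(exp(sim(z_i,z_j)/T) / sum_{k != i} exp(sim(z_i,z_k)/T))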
#compute loss
loss = args.lam_x * loss_x + args.lam_u * loss_u + args.lam_clr * loss_clr
optim.zero_grad()
loss.backward()
optim.step()
ema.update_params()
lr_schdlr.step()
loss_x_avg.append(loss_x.item())
loss_u_avg.append(loss_u.item())
loss_clr_avg.append(loss_clr.item())
loss_avg.append(loss.item())
ema.update_buffer()
def evaluate(ema):
ema.apply_shadow()
ema.model.eval()
ema.model.cuda()
dlval = get_val_loader(batch_size=128, num_workers=0)
matches = []
for ims, lbs in dlval:
ims = ims.cuda()
lbs = lbs.cuda()
with torch.no_grad():
logits, _, _ = ema.model(ims)
scores = torch.softmax(logits, dim=1)
_, preds = torch.max(scores, dim=1)
match = lbs == preds
matches.append(match)
matches = torch.cat(matches, dim=0).float()
acc = torch.mean(matches)
ema.restore()
return acc
def test(model, memory_data_loader, test_data_loader, c, epoch):
model.eval()
total_top1, total_top5, total_num, feature_bank, feature_labels = 0.0, 0.0, 0, [], []
with torch.no_grad():
# generate feature bank
for data, _, _ in tqdm(memory_data_loader, desc='Feature extracting'):
logits, feature, _ = model(data.cuda(non_blocking=True))
feature_bank.append(feature)
            feature_labels.append(torch.argmax(logits, dim=1).to(torch.int64))
# [D, N]
feature_bank = torch.cat(feature_bank, dim=0).t().contiguous()
# [N]
feature_labels = torch.cat(feature_labels, dim=0).contiguous().cpu()
# loop test data to predict the label by weighted knn search
test_bar = tqdm(test_data_loader)
for data, _, target in test_bar:
# data, target = data.cuda(non_blocking=True), target.cuda(non_blocking=True)
data = data.cuda(non_blocking=True)
_, feature, _ = model(data)
total_num += data.size(0)
# compute cos similarity between each feature vector and feature bank ---> [B, N]
sim_matrix = torch.mm(feature, feature_bank)
# [B, K]
sim_weight, sim_indices = sim_matrix.topk(k=args.k, dim=-1)
# [B, K]
# sim_labels = torch.gather(feature_labels.expand(data.size(0), -1), dim=-1, index=sim_indices)
sim_labels = torch.gather(feature_labels.expand(data.size(0), -1), dim=-1, index=sim_indices.cpu())
sim_weight = (sim_weight / args.temperature).exp()
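            # each of the k neighbours votes for its predicted label, weighted by
            # its temperature-sharpened similarity; votes are summed per class below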
# counts for each class
one_hot_label = torch.zeros(data.size(0) * args.k, c, device=sim_labels.device)
# [B*K, C]
one_hot_label = one_hot_label.scatter(-1, sim_labels.view(-1, 1), 1.0)
# weighted score ---> [B, C]
pred_scores = torch.sum(one_hot_label.view(data.size(0), -1, c) * sim_weight.cpu().unsqueeze(dim=-1), dim=1)
pred_labels = pred_scores.argsort(dim=-1, descending=True)
total_top1 += torch.sum((pred_labels[:, :1] == target.unsqueeze(dim=-1)).any(dim=-1).float()).item()
test_bar.set_description('Test Epoch: [{}/{}] Acc@1:{:.2f}%'
.format(epoch, args.n_epochs, total_top1 / total_num * 100))
return total_top1 / total_num * 100
def get_random_string(length):
letters = string.ascii_lowercase
result_str = ''.join(random.choice(letters) for i in range(length))
return result_str
def sort_unlabeled(ema,numPerClass):
ema.apply_shadow()
ema.model.eval()
ema.model.cuda()
n_iters_per_epoch = args.n_imgs_per_epoch // args.batchsize
_, _, dltrain_all = get_train_loader(args.batchsize, 1, 1, n_iters_per_epoch, L=args.n_classes*numPerClass, seed=args.seed)
predicted = []
labels = []
for ims_w, _, _, _, lbs in dltrain_all:
ims = ims_w.cuda()
labels.append(lbs)
with torch.no_grad():
logits, _, _ = ema.model(ims)
scores = torch.softmax(logits, dim=1)
predicted.append(scores.cpu())
print( "labels ",len(labels))
labels = np.concatenate(labels, axis=0)
print( "labels ",len(labels))
predicted = np.concatenate( predicted, axis=0)
preds = predicted.argmax(1)
probs = predicted.max(1)
top = np.argsort(-probs,axis=0)
del dltrain_all, logits
    labeledSize = args.n_classes * numPerClass
unique_train_pseudo_labels, unique_train_counts = np.unique(preds, return_counts=True)
print("Number of training pseudo-labels in each class: ", unique_train_counts," for classes: ", unique_train_pseudo_labels)
sortByClass = np.random.randint(0,high=len(top), size=(args.n_classes, numPerClass), dtype=int)
indx = np.zeros([args.n_classes], dtype=int)
matches = np.zeros([args.n_classes, numPerClass], dtype=int)
labls = preds[top]
samples = top
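    # walk the samples in order of decreasing confidence, keeping at most
    # numPerClass examples per pseudo-class; `matches` records whether each kept
    # pseudo-label agrees with the true label, for the accuracy report below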
for i in range(len(top)):
if indx[labls[i]] < numPerClass:
sortByClass[labls[i], indx[labls[i]]] = samples[i]
if labls[i] == labels[top[i]]:
matches[labls[i], indx[labls[i]]] = 1
indx[labls[i]] += 1
if min(indx) < numPerClass:
print("Counts of at least one class ", indx, " is lower than ", numPerClass)
name = "dataset/seeds/size"+str(labeledSize)+"." + get_random_string(8) + ".npy"
np.save(name, sortByClass[0:args.n_classes, :numPerClass])
classAcc = 100*np.sum(matches, axis=1)/numPerClass
print("Accuracy of the predicted pseudo-labels: top ", labeledSize, ", ", np.mean(classAcc), classAcc )
ema.restore()
return name
def train():
n_iters_per_epoch = args.n_imgs_per_epoch // args.batchsize
n_iters_all = n_iters_per_epoch * args.n_epochs #/ args.mu_c
epsilon = 0.000001
model, criteria_x, criteria_u = set_model()
lb_guessor = LabelGuessor(thresh=args.thr)
ema = EMA(model, args.ema_alpha)
wd_params, non_wd_params = [], []
for param in model.parameters():
if len(param.size()) == 1:
non_wd_params.append(param)
else:
wd_params.append(param)
param_list = [{'params': wd_params}, {'params': non_wd_params, 'weight_decay': 0}]
optim = torch.optim.SGD(param_list, lr=args.lr, weight_decay=args.weight_decay, momentum=args.momentum, nesterov=True)
lr_schdlr = WarmupCosineLrScheduler(optim, max_iter=n_iters_all, warmup_iter=0)
dltrain_x, dltrain_u, dltrain_all = get_train_loader(args.batchsize, args.mu, args.mu_c, n_iters_per_epoch,
L=args.n_labeled, seed=args.seed)
train_args = dict(
model=model,
criteria_x=criteria_x,
criteria_u=criteria_u,
optim=optim,
lr_schdlr=lr_schdlr,
ema=ema,
dltrain_x=dltrain_x,
dltrain_u=dltrain_u,
dltrain_all=dltrain_all,
lb_guessor=lb_guessor,
)
n_labeled = int(args.n_labeled / args.n_classes)
best_acc, top1 = -1, -1
results = {'top 1 acc': [], 'best_acc': []}
    b_schedule = [args.n_epochs // 2, (3 * args.n_epochs) // 4]  # integer epochs, so the `e in b_schedule` check below can match
if args.boot_schedule == 1:
step = int(args.n_epochs/3)
b_schedule = [step, 2*step]
elif args.boot_schedule == 2:
step = int(args.n_epochs/4)
b_schedule = [step, 2*step, 3*step]
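    # b_schedule lists the epochs at which the labeled set is re-grown by
    # bootstrapping: the EMA model's most confident pseudo-labels are written
    # out as a new, larger seed of labeled examples (see sort_unlabeled above)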
for e in range(args.n_epochs):
if args.bootstrap > 1 and (e in b_schedule):
seed = 99
n_labeled *= args.bootstrap
name = sort_unlabeled(ema, n_labeled)
print("Bootstrap at epoch ", e," Name = ",name)
dltrain_x, dltrain_u, dltrain_all = get_train_loader(args.batchsize, args.mu, args.mu_c, n_iters_per_epoch,
L=10*n_labeled, seed=seed, name=name)
train_args = dict(
model=model,
criteria_x=criteria_x,
criteria_u=criteria_u,
optim=optim,
lr_schdlr=lr_schdlr,
ema=ema,
dltrain_x=dltrain_x,
dltrain_u=dltrain_u,
dltrain_all=dltrain_all,
lb_guessor=lb_guessor,
)
model.train()
train_one_epoch(**train_args)
torch.cuda.empty_cache()
if args.test == 0 or args.lam_clr < epsilon:
top1 = evaluate(ema) * 100
elif args.test == 1:
memory_data = utils.CIFAR10Pair(root='dataset', train=True, transform=utils.test_transform, download=False)
memory_data_loader = DataLoader(memory_data, batch_size=args.batchsize, shuffle=False, num_workers=16, pin_memory=True)
test_data = utils.CIFAR10Pair(root='dataset', train=False, transform=utils.test_transform, download=False)
test_data_loader = DataLoader(test_data, batch_size=args.batchsize, shuffle=False, num_workers=16, pin_memory=True)
c = len(memory_data.classes) #10
top1 = test(model, memory_data_loader, test_data_loader, c, e)
best_acc = top1 if best_acc < top1 else best_acc
results['top 1 acc'].append('{:.4f}'.format(top1))
results['best_acc'].append('{:.4f}'.format(best_acc))
data_frame = pd.DataFrame(data=results)
data_frame.to_csv(result_dir + '/' + save_name_pre + '.accuracy.csv', index_label='epoch')
log_msg = [
'epoch: {}'.format(e + 1),
'top 1 acc: {:.4f}'.format(top1),
'best_acc: {:.4f}'.format(best_acc)]
print(', '.join(log_msg))
if __name__ == '__main__':
train()
| 2.046875 | 2 |
Logistic Regression/main.py | Frightera/LR-and-NN-for-Cancer-Data | 4 | 6753 | import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
data = pd.read_csv("data.csv")
data.info()
"""
Data columns (total 33 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 id 569 non-null int64
.
.
.
32 Unnamed: 32 0 non-null float64
"""
data.drop(["Unnamed: 32", "id"], axis = 1, inplace = True)
# data.head(10)
data.diagnosis = [1 if each == "M" else 0 for each in data.diagnosis]
y = data.diagnosis.values
x_data = data.drop(["diagnosis"], axis = 1)
# %% Normalization
x_normalized = (x_data - np.min(x_data)) / (np.max(x_data) - np.min(x_data)).values
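# column-wise min-max scaling: x' = (x - min(x)) / (max(x) - min(x)),
# so every feature lies in [0, 1]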
x_data.head()
"""
x_data.head()
Out[9]:
radius_mean texture_mean ... symmetry_worst fractal_dimension_worst
0 17.99 10.38 ... 0.4601 0.11890
1 20.57 17.77 ... 0.2750 0.08902
2 19.69 21.25 ... 0.3613 0.08758
3 11.42 20.38 ... 0.6638 0.17300
4 20.29 14.34 ... 0.2364 0.07678
"""
x_normalized.head()
"""
x_normalized.head()
Out[10]:
radius_mean texture_mean ... symmetry_worst fractal_dimension_worst
0 0.521037 0.022658 ... 0.598462 0.418864
1 0.643144 0.272574 ... 0.233590 0.222878
2 0.601496 0.390260 ... 0.403706 0.213433
3 0.210090 0.360839 ... 1.000000 0.773711
4 0.629893 0.156578 ... 0.157500 0.142595
"""
# %% train test split
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x_normalized,y,test_size = 0.25, random_state = 42)
# test size & random state can be changed, test size can be choosen as 0.2 or 0.18
# sklearn randomly splits, with given state data will be splitted with same random pattern.
# rows as features
x_train = x_train.T
x_test = x_test.T
y_train = y_train.T
y_test = y_test.T
# %% Parameter Initialize
"""
If all the weights were initialized to zero,
backpropagation will not work as expected because the gradient for the intermediate neurons
and starting neurons will die out(become zero) and will not update ever.
"""
def initialize_weights_and_bias(dimension):
w = np.full((dimension,1), 0.01) # init 0.01
b = np.zeros(1)
return w,b
def sigmoid(n):
y_hat = 1 / (1 + np.exp(-n))
return y_hat
# %%
def forward_backward_propagation(w,b,x_train,y_train):
# forward propagation
z = np.dot(w.T,x_train) + b
#y_train = y_train.T.reshape(-1,1)
y_hat = sigmoid(z)
loss = -(y_train*np.log(y_hat)+(1-y_train)*np.log(1-y_hat))
cost = (np.sum(loss))/x_train.shape[1] # x_train.shape[1] is for scaling
# Once cost is calculated, forward prop. is completed.
# backward propagation
derivative_weight = (np.dot(x_train,((y_hat-y_train).T)))/x_train.shape[1] # x_train.shape[1] is for scaling
derivative_bias = np.sum(y_hat-y_train)/x_train.shape[1] # x_train.shape[1] is for scaling
# x_train.shape[1] = 426
gradients = {"derivative_weight": derivative_weight,"derivative_bias": derivative_bias}
return cost,gradients
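# The gradients above follow from the mean cross-entropy cost, with m = x_train.shape[1]:
#   dJ/dw = (1/m) * X @ (y_hat - y)^T   -> shape (n_features, 1)
#   dJ/db = (1/m) * sum(y_hat - y)      -> scalar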
# Updating(learning) parameters
def update(w, b, x_train, y_train, learning_rate,number_of_iteration):
cost_list = []
cost_list2 = []
index = []
    # update (learn) the parameters number_of_iteration times
for i in range(number_of_iteration):
# make forward and backward propagation and find cost and gradients
cost,gradients = forward_backward_propagation(w,b,x_train,y_train)
cost_list.append(cost)
# lets update
w = w - learning_rate * gradients["derivative_weight"]
b = b - learning_rate * gradients["derivative_bias"]
if i % 100 == 0: # that's arbitrary, you can set it differently
cost_list2.append(cost)
index.append(i)
print ("Cost after iteration %i: %f" %(i, cost))
# we update(learn) parameters weights and bias
parameters = {"weight": w,"bias": b}
    plt.plot(index, cost_list2, label="cost")  # label gives plt.legend() a handle
plt.xticks(index,rotation='vertical')
plt.xlabel("Number of Iteration")
plt.ylabel("Cost")
plt.legend()
plt.show()
return parameters, gradients, cost_list
# prediction
def predict(w,b,x_test):
    # x_test is an input for forward propagation
z = sigmoid(np.dot(w.T,x_test)+b)
Y_prediction = np.zeros((1,x_test.shape[1]))
    # z here is the sigmoid output (y_hat): predict 1 (malignant) if it is
    # greater than 0.5, otherwise predict 0 (benign)
for i in range(z.shape[1]):
if z[0,i]<= 0.5:
Y_prediction[0,i] = 0
else:
Y_prediction[0,i] = 1
return Y_prediction
#implementing logistic regression
def logistic_regression(x_train, y_train, x_test, y_test, learning_rate , num_iterations):
# initialize
dimension = x_train.shape[0]
w,b = initialize_weights_and_bias(dimension)
# do not change learning rate
parameters, gradients, cost_list = update(w, b, x_train, y_train, learning_rate,num_iterations)
y_prediction_test = predict(parameters["weight"],parameters["bias"],x_test)
y_pred_train = predict(parameters["weight"],parameters["bias"],x_train)
# Print accuracy
print("test accuracy: {} %".format(100 - np.mean(np.abs(y_prediction_test - y_test)) * 100))
print("train accuracy: {} %".format(100 - np.mean(np.abs(y_pred_train - y_train)) * 100))
# %% Hyperparameter tuning
logistic_regression(x_train, y_train, x_test, y_test,learning_rate = 3, num_iterations = 1500)
"""
Cost after iteration 0: 0.693035
Cost after iteration 100: 0.153169
Cost after iteration 200: 0.121662
Cost after iteration 300: 0.107146
Cost after iteration 400: 0.098404
Cost after iteration 500: 0.092401
Cost after iteration 600: 0.087937
Cost after iteration 700: 0.084435
Cost after iteration 800: 0.081582
Cost after iteration 900: 0.079191
Cost after iteration 1000: 0.077143
Cost after iteration 1100: 0.075359
Cost after iteration 1200: 0.073784
Cost after iteration 1300: 0.072378
Cost after iteration 1400: 0.071111
No handles with labels found to put in legend.
test accuracy: 98.6013986013986 %
train accuracy: 98.35680751173709 %
"""
logistic_regression(x_train, y_train, x_test, y_test,learning_rate = 1, num_iterations = 1500)
"""
Cost after iteration 0: 0.693035
Cost after iteration 100: 0.226383
Cost after iteration 200: 0.176670
Cost after iteration 300: 0.153585
Cost after iteration 400: 0.139306
Cost after iteration 500: 0.129319
Cost after iteration 600: 0.121835
Cost after iteration 700: 0.115963
Cost after iteration 800: 0.111204
Cost after iteration 900: 0.107248
No handles with labels found to put in legend.
Cost after iteration 1000: 0.103893
Cost after iteration 1100: 0.101001
Cost after iteration 1200: 0.098474
Cost after iteration 1300: 0.096240
Cost after iteration 1400: 0.094247
test accuracy: 97.9020979020979 %
train accuracy: 98.12206572769954 %
"""
logistic_regression(x_train, y_train, x_test, y_test,learning_rate = 0.3, num_iterations = 1500)
"""
Cost after iteration 0: 0.693035
Cost after iteration 100: 0.357455
Cost after iteration 200: 0.274917
Cost after iteration 300: 0.235865
Cost after iteration 400: 0.212165
Cost after iteration 500: 0.195780
Cost after iteration 600: 0.183524
Cost after iteration 700: 0.173868
Cost after iteration 800: 0.165980
Cost after iteration 900: 0.159363
Cost after iteration 1000: 0.153700
Cost after iteration 1100: 0.148775
Cost after iteration 1200: 0.144439
Cost after iteration 1300: 0.140581
Cost after iteration 1400: 0.137119
No handles with labels found to put in legend.
test accuracy: 97.9020979020979 %
train accuracy: 96.94835680751174 %
"""
# %% Sklearn
from sklearn.linear_model import LogisticRegression
x_train = x_train.T
x_test = x_test.T
y_train = y_train.T
y_test = y_test.T
logreg = LogisticRegression(random_state = 42,max_iter= 1500)
print("test accuracy: {} ".format(logreg.fit(x_train, y_train).score(x_test, y_test)))
print("train accuracy: {} ".format(logreg.fit(x_train, y_train).score(x_train, y_train)))
"""
test accuracy: 0.986013986013986
train accuracy: 0.9671361502347418
"""
# %%
| 2.8125 | 3 |
fine-tune/inference_embedding.py | LinHuiqing/nonparaSeq2seqVC_code | 199 | 6754 | import os
import numpy as np
import torch
import argparse
from hparams import create_hparams
from model import lcm
from train import load_model
from torch.utils.data import DataLoader
from reader import TextMelIDLoader, TextMelIDCollate, id2sp
from inference_utils import plot_data
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--checkpoint_path', type=str,
                    help='path of the checkpoint to load')
parser.add_argument('--hparams', type=str,
required=False, help='comma separated name=value pairs')
args = parser.parse_args()
checkpoint_path=args.checkpoint_path
hparams = create_hparams(args.hparams)
model = load_model(hparams)
model.load_state_dict(torch.load(checkpoint_path)['state_dict'], strict=False)
_ = model.eval()
def gen_embedding(speaker):
training_list = hparams.training_list
train_set_A = TextMelIDLoader(training_list, hparams.mel_mean_std, hparams.speaker_A,
hparams.speaker_B,
shuffle=False,pids=[speaker])
collate_fn = TextMelIDCollate(lcm(hparams.n_frames_per_step_encoder,
hparams.n_frames_per_step_decoder))
train_loader_A = DataLoader(train_set_A, num_workers=1, shuffle=False,
sampler=None,
batch_size=1, pin_memory=False,
drop_last=True, collate_fn=collate_fn)
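    # batch_size=1, so each iteration yields a single utterance; every row
    # appended to speaker_embeddings below is a one-utterance speaker embedding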
with torch.no_grad():
speaker_embeddings = []
        for i, batch in enumerate(train_loader_A):
x, y = model.parse_batch(batch)
text_input_padded, mel_padded, text_lengths, mel_lengths, speaker_id = x
speaker_id, speaker_embedding = model.speaker_encoder.inference(mel_padded)
speaker_embedding = speaker_embedding.data.cpu().numpy()
speaker_embeddings.append(speaker_embedding)
speaker_embeddings = np.vstack(speaker_embeddings)
print(speaker_embeddings.shape)
if not os.path.exists('outdir/embeddings'):
os.makedirs('outdir/embeddings')
np.save('outdir/embeddings/%s.npy'%speaker, speaker_embeddings)
plot_data([speaker_embeddings],
'outdir/embeddings/%s.pdf'%speaker)
print('Generating embedding of %s ...'%hparams.speaker_A)
gen_embedding(hparams.speaker_A)
print('Generating embedding of %s ...'%hparams.speaker_B)
gen_embedding(hparams.speaker_B)
| 2.234375 | 2 |
dalme_app/migrations/0001_initial.py | DALME/dalme | 6 | 6755 | # Generated by Django 3.1.2 on 2020-11-29 13:25
import dalme_app.models._templates
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_currentuser.middleware
import uuid
import wagtail.search.index
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('auth', '0012_alter_user_first_name_max_length'),
('contenttypes', '0002_remove_content_type_name'),
]
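    # NOTE: the rs_* models below are unmanaged (managed=False); they map onto
    # tables owned by an external digital-asset-management database rather than
    # tables created by this migration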
operations = [
migrations.CreateModel(
name='rs_collection',
fields=[
('ref', models.IntegerField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=100, null=True)),
('user', models.IntegerField(null=True)),
('created', models.DateTimeField(blank=True, null=True)),
('public', models.IntegerField(default='0')),
('theme', models.CharField(max_length=100, null=True)),
('theme2', models.CharField(max_length=100, null=True)),
('theme3', models.CharField(max_length=100, null=True)),
('allow_changes', models.IntegerField(default='0')),
('cant_delete', models.IntegerField(default='0')),
('keywords', models.TextField()),
('savedsearch', models.IntegerField(null=True)),
('home_page_publish', models.IntegerField(null=True)),
('home_page_text', models.TextField()),
('home_page_image', models.IntegerField(null=True)),
('session_id', models.IntegerField(null=True)),
('theme4', models.CharField(max_length=100, null=True)),
('theme5', models.CharField(max_length=100, null=True)),
('theme6', models.CharField(max_length=100, null=True)),
('theme7', models.CharField(max_length=100, null=True)),
('theme8', models.CharField(max_length=100, null=True)),
('theme9', models.CharField(max_length=100, null=True)),
('theme10', models.CharField(max_length=100, null=True)),
('theme11', models.CharField(max_length=100, null=True)),
('theme12', models.CharField(max_length=100, null=True)),
('theme13', models.CharField(max_length=100, null=True)),
('theme14', models.CharField(max_length=100, null=True)),
('theme15', models.CharField(max_length=100, null=True)),
('theme16', models.CharField(max_length=100, null=True)),
('theme17', models.CharField(max_length=100, null=True)),
('theme18', models.CharField(max_length=100, null=True)),
('theme19', models.CharField(max_length=100, null=True)),
('theme20', models.CharField(max_length=100, null=True)),
],
options={
'db_table': 'collection',
'managed': False,
},
),
migrations.CreateModel(
name='rs_collection_resource',
fields=[
('date_added', models.DateTimeField(auto_now_add=True, primary_key=True, serialize=False)),
('comment', models.TextField()),
('rating', models.IntegerField(null=True)),
('use_as_theme_thumbnail', models.IntegerField(null=True)),
('purchase_size', models.CharField(max_length=10, null=True)),
('purchase_complete', models.IntegerField(default='0')),
('purchase_price', models.FloatField(default='0.00', max_length=10)),
('sortorder', models.IntegerField(null=True)),
],
options={
'db_table': 'collection_resource',
'managed': False,
},
),
migrations.CreateModel(
name='rs_resource',
fields=[
('ref', models.IntegerField(primary_key=True, serialize=False)),
('title', models.CharField(max_length=200, null=True)),
('resource_type', models.IntegerField(null=True)),
('has_image', models.IntegerField(default='0')),
('is_transcoding', models.IntegerField(default='0')),
('hit_count', models.IntegerField(default='0')),
('new_hit_count', models.IntegerField(default='0')),
('creation_date', models.DateTimeField(blank=True, null=True)),
('rating', models.IntegerField(null=True)),
('user_rating', models.IntegerField(null=True)),
('user_rating_count', models.IntegerField(null=True)),
('user_rating_total', models.IntegerField(null=True)),
('country', models.CharField(default=None, max_length=200, null=True)),
('file_extension', models.CharField(max_length=10, null=True)),
('preview_extension', models.CharField(max_length=10, null=True)),
('image_red', models.IntegerField(null=True)),
('image_green', models.IntegerField(null=True)),
('image_blue', models.IntegerField(null=True)),
('thumb_width', models.IntegerField(null=True)),
('thumb_height', models.IntegerField(null=True)),
('archive', models.IntegerField(default='0')),
('access', models.IntegerField(default='0')),
('colour_key', models.CharField(max_length=5, null=True)),
('created_by', models.IntegerField(null=True)),
('file_path', models.CharField(max_length=500, null=True)),
('file_modified', models.DateTimeField(blank=True, null=True)),
('file_checksum', models.CharField(max_length=32, null=True)),
('request_count', models.IntegerField(default='0')),
('expiry_notification_sent', models.IntegerField(default='0')),
('preview_tweaks', models.CharField(max_length=50, null=True)),
('geo_lat', models.FloatField(default=None, null=True)),
('geo_long', models.FloatField(default=None, null=True)),
('mapzoom', models.IntegerField(null=True)),
('disk_usage', models.IntegerField(null=True)),
('disk_usage_last_updated', models.DateTimeField(blank=True, null=True)),
('file_size', models.IntegerField(default=None, null=True)),
('preview_attempts', models.IntegerField(default=None, null=True)),
('field12', models.CharField(default=None, max_length=200, null=True)),
('field8', models.CharField(default=None, max_length=200, null=True)),
('field3', models.CharField(default=None, max_length=200, null=True)),
('annotation_count', models.IntegerField(null=True)),
('field51', models.CharField(default=None, max_length=200, null=True)),
('field79', models.CharField(blank=True, default=None, max_length=200, null=True)),
('modified', models.DateTimeField(auto_now_add=True, null=True)),
],
options={
'db_table': 'resource',
'managed': False,
},
),
migrations.CreateModel(
name='rs_resource_data',
fields=[
('django_id', models.IntegerField(db_column='django_id', primary_key=True, serialize=False)),
('value', models.TextField()),
],
options={
'db_table': 'resource_data',
'managed': False,
},
),
migrations.CreateModel(
name='rs_resource_type_field',
fields=[
('ref', models.IntegerField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=50, null=True)),
('title', models.CharField(max_length=400, null=True)),
('type', models.IntegerField(null=True)),
('order_by', models.IntegerField(default='0')),
('keywords_index', models.IntegerField(default='0')),
('partial_index', models.IntegerField(default='0')),
('resource_type', models.IntegerField(default='0')),
('resource_column', models.CharField(max_length=50, null=True)),
('display_field', models.IntegerField(default='1')),
('use_for_similar', models.IntegerField(default='1')),
('iptc_equiv', models.CharField(max_length=20, null=True)),
('display_template', models.TextField()),
('tab_name', models.CharField(max_length=50, null=True)),
('required', models.IntegerField(default='0')),
('smart_theme_name', models.CharField(max_length=200, null=True)),
('exiftool_field', models.CharField(max_length=200, null=True)),
('advanced_search', models.IntegerField(default='1')),
('simple_search', models.IntegerField(default='0')),
('help_text', models.TextField()),
('display_as_dropdown', models.IntegerField(default='0')),
('external_user_access', models.IntegerField(default='1')),
('autocomplete_macro', models.TextField()),
('hide_when_uploading', models.IntegerField(default='0')),
('hide_when_restricted', models.IntegerField(default='0')),
('value_filter', models.TextField()),
('exiftool_filter', models.TextField()),
('omit_when_copying', models.IntegerField(default='0')),
('tooltip_text', models.TextField()),
('regexp_filter', models.CharField(max_length=400, null=True)),
('sync_field', models.IntegerField(null=True)),
('display_condition', models.CharField(max_length=400, null=True)),
('onchange_macro', models.TextField()),
('field_constraint', models.IntegerField(null=True)),
('linked_data_field', models.TextField()),
('automatic_nodes_ordering', models.IntegerField(default='0')),
('fits_field', models.CharField(max_length=255, null=True)),
('personal_data', models.IntegerField(default='0')),
],
options={
'db_table': 'resource_type_field',
'managed': False,
},
),
migrations.CreateModel(
name='rs_user',
fields=[
('ref', models.IntegerField(primary_key=True, serialize=False)),
('username', models.CharField(max_length=50, unique=True)),
('password', models.CharField(max_length=64, null=True)),
('fullname', models.CharField(max_length=100, null=True)),
('email', models.CharField(max_length=100, null=True)),
('usergroup', models.IntegerField(choices=[(2, 'General User'), (4, 'Archivist'), (1, 'Administrator'), (3, 'Super Admin')], null=True)),
('last_active', models.DateTimeField(blank=True, null=True)),
('logged_in', models.IntegerField(null=True)),
('last_browser', models.TextField()),
('last_ip', models.CharField(max_length=100, null=True)),
('current_collection', models.IntegerField(null=True)),
('accepted_terms', models.IntegerField(default='0')),
('account_expires', models.DateTimeField(blank=True, null=True)),
('comments', models.TextField()),
('session', models.CharField(max_length=50, null=True)),
('ip_restrict', models.TextField()),
('search_filter_override', models.TextField()),
('password_last_change', models.DateTimeField(null=True)),
('login_tries', models.IntegerField(default='0')),
('login_last_try', models.DateTimeField(blank=True, null=True)),
('approved', models.IntegerField(default='1')),
('lang', models.CharField(max_length=11, null=True)),
('created', models.DateTimeField(auto_now_add=True, null=True)),
('hidden_collections', models.TextField()),
('password_reset_hash', models.CharField(max_length=100, null=True)),
('origin', models.CharField(max_length=50, null=True)),
('unique_hash', models.CharField(max_length=50, null=True)),
('wp_authrequest', models.CharField(max_length=50, null=True)),
('csrf_token', models.CharField(max_length=255, null=True)),
],
options={
'db_table': 'user',
'managed': False,
},
),
migrations.CreateModel(
name='Agent',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('standard_name', models.CharField(max_length=255)),
('type', models.IntegerField(choices=[(1, 'Person'), (2, 'Organization')])),
('notes', models.TextField()),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_agent_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_agent_modification', to=settings.AUTH_USER_MODEL)),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='agent', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Attachment',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('file', models.FileField(upload_to='attachments/%Y/%m/')),
('type', models.CharField(max_length=255, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attachment_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attachment_modification', to=settings.AUTH_USER_MODEL)),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attachment_related', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Attribute_type',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('short_name', models.CharField(max_length=55, unique=True)),
('description', models.TextField()),
('data_type', models.CharField(choices=[('DATE', 'DATE (date)'), ('INT', 'INT (integer)'), ('STR', 'STR (string)'), ('TXT', 'TXT (text)'), ('FK-UUID', 'FK-UUID (DALME record)'), ('FK-INT', 'FK-INT (DALME record)')], max_length=15)),
('source', models.CharField(blank=True, default=None, max_length=255, null=True)),
('options_list', models.CharField(blank=True, default=None, max_length=255, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attribute_type_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attribute_type_modification', to=settings.AUTH_USER_MODEL)),
('same_as', models.ForeignKey(db_column='same_as', null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.attribute_type')),
],
options={
'ordering': ['id'],
},
),
migrations.CreateModel(
name='Concept',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('getty_id', models.IntegerField(db_index=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_concept_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_concept_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Content_attributes',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('order', models.IntegerField(db_index=True, null=True)),
('required', models.BooleanField(default=False)),
('unique', models.BooleanField(default=True)),
('attribute_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='content_types', to='dalme_app.attribute_type')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Content_class',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('short_name', models.CharField(max_length=55, unique=True)),
('description', models.TextField()),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_class_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_class_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['id'],
},
),
migrations.CreateModel(
name='Content_type',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255, unique=True)),
('short_name', models.CharField(max_length=55)),
('description', models.TextField()),
('has_pages', models.BooleanField(db_index=True, default=False)),
('has_inventory', models.BooleanField(default=False)),
('parents', models.CharField(blank=True, default=None, max_length=255, null=True)),
('r1_inheritance', models.CharField(blank=True, default=None, max_length=255, null=True)),
('r2_inheritance', models.CharField(blank=True, default=None, max_length=255, null=True)),
('attribute_types', models.ManyToManyField(through='dalme_app.Content_attributes', to='dalme_app.Attribute_type')),
('content_class', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='dalme_app.content_class')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_type_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_type_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['id'],
},
),
migrations.CreateModel(
name='CountryReference',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255, unique=True)),
('alpha_3_code', models.CharField(max_length=3)),
('alpha_2_code', models.CharField(max_length=2)),
('num_code', models.IntegerField()),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_countryreference_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_countryreference_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='Entity_phrase',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('phrase', models.TextField(blank=True)),
('type', models.IntegerField(choices=[(1, 'Agent'), (2, 'Object'), (3, 'Place')])),
('object_id', models.UUIDField(db_index=True, null=True)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_entity_phrase_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_entity_phrase_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Headword',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('word', models.CharField(max_length=55)),
('full_lemma', models.CharField(max_length=255)),
('concept_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='dalme_app.concept')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_headword_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_headword_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Object',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('concept', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.concept')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_object_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_object_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Page',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=55)),
('dam_id', models.IntegerField(db_index=True, null=True)),
('order', models.IntegerField(db_index=True)),
('canvas', models.TextField(null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_page_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_page_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['order'],
},
),
migrations.CreateModel(
name='Set',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('set_type', models.IntegerField(choices=[(1, 'Corpus'), (2, 'Collection'), (3, 'Dataset'), (4, 'Workset')])),
('is_public', models.BooleanField(default=False)),
('has_landing', models.BooleanField(default=False)),
('endpoint', models.CharField(max_length=55)),
('permissions', models.IntegerField(choices=[(1, 'Private'), (2, 'Others: view'), (3, 'Others: view|add'), (4, 'Others: view|add|delete')], default=2)),
('description', models.TextField()),
('stat_title', models.CharField(blank=True, max_length=25, null=True)),
('stat_text', models.CharField(blank=True, max_length=255, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_creation', to=settings.AUTH_USER_MODEL)),
('dataset_usergroup', models.ForeignKey(limit_choices_to={'properties__type': 3}, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='dataset', to='auth.group')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_modification', to=settings.AUTH_USER_MODEL)),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_related', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Source',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('short_name', models.CharField(max_length=55)),
('has_inventory', models.BooleanField(db_index=True, default=False)),
('is_private', models.BooleanField(db_index=True, default=False)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_modification', to=settings.AUTH_USER_MODEL)),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_related', to=settings.AUTH_USER_MODEL)),
],
bases=(wagtail.search.index.Indexed, models.Model),
),
migrations.CreateModel(
name='Wordform',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('normalized_form', models.CharField(max_length=55)),
('pos', models.CharField(max_length=255)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_wordform_creation', to=settings.AUTH_USER_MODEL)),
('headword_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='dalme_app.headword')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_wordform_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Transcription',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('transcription', models.TextField(blank=True, default=None)),
('author', models.CharField(default=dalme_app.models._templates.get_current_username, max_length=255)),
('version', models.IntegerField(default=1)),
('count_ignore', models.BooleanField(default=False)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_transcription_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_transcription_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Token',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('raw_token', models.CharField(max_length=255)),
('clean_token', models.CharField(max_length=55)),
('order', models.IntegerField(db_index=True)),
('flags', models.CharField(max_length=10)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_token_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_token_modification', to=settings.AUTH_USER_MODEL)),
('object_phrase_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.entity_phrase')),
('wordform_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='dalme_app.wordform')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Ticket',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('subject', models.CharField(max_length=140)),
('description', models.TextField(blank=True, null=True)),
('status', models.IntegerField(choices=[(0, 'Open'), (1, 'Closed')], default=0)),
('url', models.CharField(default=None, max_length=255, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_ticket_creation', to=settings.AUTH_USER_MODEL)),
('file', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.attachment')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_ticket_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['status', 'creation_timestamp'],
},
),
migrations.CreateModel(
name='TaskList',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=60)),
('slug', models.SlugField(default='')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tasklist_creation', to=settings.AUTH_USER_MODEL)),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_list_group', to='auth.group')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tasklist_modification', to=settings.AUTH_USER_MODEL)),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tasklist_related', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name_plural': 'Task Lists',
'ordering': ['name'],
'unique_together': {('group', 'slug')},
},
),
migrations.CreateModel(
name='Task',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('title', models.CharField(max_length=140)),
('due_date', models.DateField(blank=True, null=True)),
('completed', models.BooleanField(default=False)),
('completed_date', models.DateField(blank=True, null=True)),
('description', models.TextField(blank=True, null=True)),
('priority', models.PositiveIntegerField(blank=True, null=True)),
('position', models.CharField(blank=True, default=None, max_length=255)),
('url', models.CharField(default=None, max_length=255, null=True)),
('assigned_to', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='task_assigned_to', to=settings.AUTH_USER_MODEL)),
('created_by', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='task_created_by', to=settings.AUTH_USER_MODEL)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_task_creation', to=settings.AUTH_USER_MODEL)),
('file', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.attachment')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_task_modification', to=settings.AUTH_USER_MODEL)),
('task_list', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.tasklist')),
('workset', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='dalme_app.set')),
],
options={
'ordering': ['priority', 'creation_timestamp'],
},
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('tag_type', models.CharField(choices=[('WF', 'Workflow'), ('C', 'Control'), ('T', 'Ticket')], max_length=2)),
('tag', models.CharField(default=None, max_length=55, null=True)),
('tag_group', models.CharField(default=None, max_length=255, null=True)),
('object_id', models.CharField(db_index=True, max_length=55, null=True)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tag_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tag_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Source_pages',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_pages_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_pages_modification', to=settings.AUTH_USER_MODEL)),
('page', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sources', to='dalme_app.page')),
('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='source_pages', to='dalme_app.source')),
('transcription', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='source_pages', to='dalme_app.transcription')),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='source',
name='pages',
field=models.ManyToManyField(db_index=True, through='dalme_app.Source_pages', to='dalme_app.Page'),
),
migrations.AddField(
model_name='source',
name='parent',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='children', to='dalme_app.source'),
),
migrations.AddField(
model_name='source',
name='primary_dataset',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_query_name='set_members', to='dalme_app.set'),
),
migrations.AddField(
model_name='source',
name='type',
field=models.ForeignKey(db_column='type', on_delete=django.db.models.deletion.PROTECT, to='dalme_app.content_type'),
),
migrations.CreateModel(
name='Scope',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('type', models.IntegerField(choices=[(1, 'Temporal'), (2, 'Spatial'), (3, 'Linguistic'), (4, 'Context')])),
('range', models.TextField()),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_scope_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_scope_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='RightsPolicy',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=100)),
('rights_status', models.IntegerField(choices=[(1, 'Copyrighted'), (2, 'Orphaned'), (3, 'Owned'), (4, 'Public Domain'), (5, 'Unknown')], default=5)),
('rights', models.TextField(blank=True, default=None)),
('rights_notice', models.JSONField(null=True)),
('licence', models.TextField(blank=True, default=None, null=True)),
('rights_holder', models.CharField(default=None, max_length=255, null=True)),
('notice_display', models.BooleanField(default=False)),
('public_display', models.BooleanField(default=True)),
('attachments', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.attachment')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_rightspolicy_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_rightspolicy_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Relationship',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('source_object_id', models.UUIDField(db_index=True, null=True)),
('target_object_id', models.UUIDField(db_index=True, null=True)),
('notes', models.TextField(blank=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_relationship_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_relationship_modification', to=settings.AUTH_USER_MODEL)),
('scope', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='dalme_app.scope')),
('source_content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='relationship_sources', to='contenttypes.contenttype')),
('target_content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='relationship_targets', to='contenttypes.contenttype')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='PublicRegister',
fields=[
('object_id', models.UUIDField(db_index=True, primary_key=True, serialize=False)),
('created', models.DateTimeField(auto_now_add=True, null=True)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creator', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_publicregister_creation', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('full_name', models.CharField(blank=True, max_length=50)),
('primary_group', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='auth.group')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Place',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('std_name', models.CharField(max_length=255)),
('type', models.IntegerField(db_index=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_place_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_place_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Object_attribute',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('attribute_concept', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.concept')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_object_attribute_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_object_attribute_modification', to=settings.AUTH_USER_MODEL)),
('object', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.object')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='LanguageReference',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('glottocode', models.CharField(max_length=25, unique=True)),
('iso6393', models.CharField(blank=True, default=None, max_length=25, null=True, unique=True)),
('name', models.CharField(max_length=255)),
('type', models.IntegerField(choices=[(1, 'Language'), (2, 'Dialect')])),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_languagereference_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_languagereference_modification', to=settings.AUTH_USER_MODEL)),
('parent', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.languagereference')),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='GroupProperties',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type', models.IntegerField(choices=[(1, 'Admin'), (2, 'DAM'), (3, 'Dataset'), (4, 'Knowledge Base'), (5, 'Website')])),
('description', models.CharField(max_length=255)),
('group', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='properties', to='auth.group')),
],
),
migrations.AddField(
model_name='entity_phrase',
name='transcription_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='entity_phrases', to='dalme_app.transcription'),
),
migrations.AddField(
model_name='content_attributes',
name='content_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='attribute_type_list', to='dalme_app.content_type'),
),
migrations.AddField(
model_name='content_attributes',
name='creation_user',
field=models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_attributes_creation', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='content_attributes',
name='modification_user',
field=models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_attributes_modification', to=settings.AUTH_USER_MODEL),
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('object_id', models.CharField(db_index=True, max_length=55, null=True)),
('body', models.TextField(blank=True, default=None, null=True)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_comment_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_comment_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['creation_timestamp'],
},
),
migrations.CreateModel(
name='AttributeReference',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('short_name', models.CharField(max_length=55)),
('description', models.TextField()),
('data_type', models.CharField(max_length=15)),
('source', models.CharField(max_length=255)),
('term_type', models.CharField(blank=True, default=None, max_length=55)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attributereference_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attributereference_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Workflow',
fields=[
('source', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='workflow', serialize=False, to='dalme_app.source')),
('wf_status', models.IntegerField(choices=[(1, 'assessing'), (2, 'processing'), (3, 'processed')], default=2)),
('stage', models.IntegerField(choices=[(1, 'ingestion'), (2, 'transcription'), (3, 'markup'), (4, 'review'), (5, 'parsing')], default=1)),
('last_modified', models.DateTimeField(blank=True, null=True)),
('help_flag', models.BooleanField(default=False)),
('ingestion_done', models.BooleanField(default=False)),
('transcription_done', models.BooleanField(default=False)),
('markup_done', models.BooleanField(default=False)),
('parsing_done', models.BooleanField(default=False)),
('review_done', models.BooleanField(default=False)),
('is_public', models.BooleanField(default=False)),
('last_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Work_log',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('event', models.CharField(max_length=255)),
('timestamp', models.DateTimeField(auto_now_add=True)),
('user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='work_log', to='dalme_app.workflow')),
],
),
migrations.CreateModel(
name='Source_credit',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('type', models.IntegerField(choices=[(1, 'Editor'), (2, 'Corrections'), (3, 'Contributor')])),
('note', models.CharField(blank=True, max_length=255, null=True)),
('agent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='credits', to='dalme_app.agent')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_credit_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_credit_modification', to=settings.AUTH_USER_MODEL)),
('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='credits', to='dalme_app.source')),
],
options={
'unique_together': {('source', 'agent', 'type')},
},
),
migrations.AlterUniqueTogether(
name='source',
unique_together={('type', 'name')},
),
migrations.CreateModel(
name='Set_x_content',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('object_id', models.UUIDField(db_index=True, default=uuid.uuid4)),
('workset_done', models.BooleanField(default=False)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_x_content_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_x_content_modification', to=settings.AUTH_USER_MODEL)),
('set_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='members', to='dalme_app.set')),
],
options={
'ordering': ['set_id', 'id'],
'unique_together': {('content_type', 'object_id', 'set_id')},
},
),
migrations.CreateModel(
name='LocaleReference',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('administrative_region', models.CharField(max_length=255)),
('latitude', models.DecimalField(decimal_places=6, max_digits=9, null=True)),
('longitude', models.DecimalField(decimal_places=6, max_digits=9, null=True)),
('country', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.countryreference')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_localereference_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_localereference_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['country', 'name'],
'unique_together': {('name', 'administrative_region')},
},
),
migrations.CreateModel(
name='Attribute',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('object_id', models.UUIDField(db_index=True, null=True)),
('value_STR', models.CharField(blank=True, default=None, max_length=255, null=True)),
('value_DATE_d', models.IntegerField(blank=True, null=True)),
('value_DATE_m', models.IntegerField(blank=True, null=True)),
('value_DATE_y', models.IntegerField(blank=True, null=True)),
('value_DATE', models.DateField(blank=True, null=True)),
('value_INT', models.IntegerField(blank=True, null=True)),
('value_TXT', models.TextField(blank=True, default=None, null=True)),
('value_JSON', models.JSONField(null=True)),
('attribute_type', models.ForeignKey(db_column='attribute_type', on_delete=django.db.models.deletion.CASCADE, to='dalme_app.attribute_type')),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attribute_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attribute_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'unique_together': {('object_id', 'attribute_type', 'value_STR')},
},
),
]
| 1.648438 | 2 |
django_app/DataEntrySystem/apps.py | Hezepeng/Financial-Acquisition-And-Editing-System | 0 | 6756 | <gh_stars>0
from django.apps import AppConfig
class DataentrysystemConfig(AppConfig):
name = 'DataEntrySystem'
| 1.210938 | 1 |
bombgame/recursive_bt_maze.py | JeFaProductions/bombgame2 | 0 | 6757 | <filename>bombgame/recursive_bt_maze.py
# recursive_bt_maze.py
#
# Author: <NAME>
# Created On: 16 Feb 2019
import os
import random
import numpy as np
class RecursiveBTMaze:
def __init__(self, width, height):
if width % 2 == 0 or height % 2 == 0:
raise ValueError("Width and height need to be odd.")
self.width = width
self.height = height
self.go = {'N': np.array([0, 2]),
'E': np.array([2, 0]),
'S': np.array([0, -2]),
'W': np.array([-2, 0])}
        self.go_half = {key: (0.5 * value).astype(int) for key, value in self.go.items()}
self.opposite = {'N': 'S', 'E': 'W', 'S': 'N', 'W': 'E'}
# 0: path, 1: wall.
        self.data = np.ones((height, width), dtype=int)
self.stack = []
index = np.array([random.randint(0, self.height - 1),
random.randint(0, self.width - 1)])
index[index % 2 == 0] += 1
self.stack.append([index, self.shuffle_directions()])
def generate(self):
while self.next():
pass
def next(self, borders=False):
if self.stack:
index, directions = self.stack.pop()
stack_size = len(self.stack)
directions_size = len(directions)
while directions:
direction = directions.pop()
new_index = index + self.go[direction]
# Special case at the borders.
if borders:
if self.cell_valid(index + self.go_half[direction]) and not self.cell_valid(new_index):
if random.choice([0, 1]):
y, x = index + self.go_half[direction]
self.data[y, x] = 0
if self.cell_valid(new_index) and not self.cell_visited(new_index):
self.stack.append([index, directions])
self.cell_move(index, new_index)
self.stack.append([new_index, self.shuffle_directions()])
break
if directions_size == 4 and not directions and len(self.stack) == stack_size:
self.random_break(index)
return True
else:
return False
def random_break(self, index):
for direction in self.shuffle_directions():
new_index = index + self.go[direction]
if self.cell_valid(new_index) and self.cell_value(index + self.go_half[direction]) == 1:
self.cell_move(index, new_index)
break
def cell_value(self, index):
y, x = index
return self.data[y, x]
def cell_visited(self, index):
return self.cell_value(index) != 1
def cell_valid(self, index):
y, x = index
if y < 0 or y >= self.height or x < 0 or x >= self.width:
return False
return True
def cell_move(self, index, new_index):
y, x = new_index
self.data[y, x] = 0
        y, x = (index + 0.5 * (new_index - index)).astype(int)
self.data[y, x] = 0
def shuffle_directions(self):
        return random.sample(list(self.go.keys()), len(self.go))
def itermaze(self):
return self.__iter2d__(self.data)
@staticmethod
def __iter2d__(data):
for i in range(data.shape[0]):
for j in range(data.shape[1]):
yield np.array([i, j]), data[i, j]
def __str__(self):
data = -1 * np.ones((self.height + 2, self.width + 2))
out = ''
wall = '#'
path = '0'
border = '+'
data[1:-1, 1:-1] = self.data
for index, value in self.__iter2d__(data):
if index[1] == 0:
out += os.linesep
if value == -1:
out += border
elif value == 0:
out += path
elif value == 1:
out += wall
return out
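# Minimal usage sketch (the 21x11 size is an arbitrary choice; both
# dimensions must be odd, as enforced by the constructor): generate a full
# maze and print its ASCII rendering ('#' walls, '0' paths, '+' border).
if __name__ == '__main__':
    demo_maze = RecursiveBTMaze(21, 11)
    demo_maze.generate()
    print(demo_maze)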
| 3.484375 | 3 |
KV_Reader.py | Nibuja05/KVConverter | 2 | 6758 |
import re
import math
class KVPart():
"""docstring for KVPart"""
def __init__(self, name, tab_count = 0):
#super(KVPart, self).__init__()
self.name = name
self.values = []
self.tab_count = tab_count
self.parent = None
self.master = False
def add_simple_value(self, value):
self.values.append(value)
def add_KVPart(self, name):
if self.master == False:
new_KVPart = KVPart(name, self.tab_count + 1)
else:
new_KVPart = KVPart(name, self.tab_count)
new_KVPart.set_parent(self)
self.values.append(new_KVPart)
return new_KVPart
def add_KVPart_finished(self, part):
if not part is None:
part.set_tab_count(self.tab_count + 1)
self.values.append(part)
def add_KVComment(self, text):
new_KVComment = KVComment(text)
self.values.append(new_KVComment)
def is_empty(self):
if len(self.values) == 0:
return True
return False
def set_parent(self, parent):
self.parent = parent
def get_parent(self):
return self.parent
def has_parent(self):
if self.parent is not None:
return True
return False
def get_name(self):
return self.name
def set_master(self, boolean):
self.master = boolean
def get_values(self):
return self.values
def has_KV_child(self):
return any(isinstance(x, KVPart) for x in self.values)
def set_tab_count(self, count):
self.tab_count = count
def items(self):
return self.name, self.values[0]
def __str__(self):
if self.master == False:
string = self.fTab(self.tab_count) + "\"" + self.name + "\""
if any(isinstance(x, KVPart) for x in self.values):
string += "\n" + self.fTab(self.tab_count) + "{\n"
else:
string += self.get_normal_space(string)
for x in self.values:
if type(x) is KVPart:
string += str(x)
elif type(x) is KVComment:
string += self.fTab(self.tab_count + 1) + str(x) + "\n"
else:
string += "\"" + str(x) + "\"\n"
if any(isinstance(x, KVPart) for x in self.values):
string += self.fTab(self.tab_count) + "}\n"
return string
else:
if len(self.values) > 1:
string = ""
for x in self.values:
string += str(x) + "\n"
return string
else:
return ""
def __repr__(self):
return "<|" + self.name + "|>"
def fTab(self, count):
string = ""
for x in range(count):
string += "\t"
return string
def get_normal_space(self, text):
lines = text.splitlines()
last_line = lines[len(lines) - 1]
new_position = last_line.rfind("\"")
		# int() is needed: math.floor returns a float under Python 2 and
		# fTab's range() call rejects floats
		tab_count = int(math.floor((40 - new_position) / 5))
space_count = ((40 - new_position) % 5) + 1
string = ""
for x in range(space_count):
string += " "
string += self.fTab(tab_count)
return string
class KVComment():
"""docstring for KVComment"""
def __init__(self, text):
#super(KVComment, self).__init__()
self.text = text
def __str__(self):
return self.text
def read_file(path):
#path = input("Please enter the path of the KV File:")
#path = "C:\\Steam\\steamapps\\common\\dota 2 beta\\game\\dota_addons\\heataria\\scripts\\npc\\abilities\\heataria_blaze_path.txt"
	try:
		file = open(path, "r")
		text = file.read()
		file.close()
	except IOError:
		# IOError covers missing or unreadable files under Python 2;
		# report the path and re-raise rather than returning a bad tree.
		print "Could not read KV file: %s" % path
		raise
	master = KVPart("master")
	master.set_master(True)
	progress_text(text, master)
	return master
#processes a KV textfile into a KV_Part structure
def progress_text(text, last_KVPart = None):
if last_KVPart is not None:
#search patterns to check structure
quote_pattern = r'\"(.*?)\"'
open_pattern = r'.*{'
close_pattern = r'.*}'
comment_pattern = r'//.*'
quote_match = re.search(quote_pattern, text)
open_match = re.search(open_pattern, text)
close_match = re.search(close_pattern, text)
comment_match = re.search(comment_pattern, text)
#cancel if there are no more quotes left
if quote_match is not None:
quote_start = quote_match.start()
else:
return
#if there are no brackets left, give them a placeholder value
if open_match is not None:
open_start = open_match.start()
else:
open_start = len(text)
if close_match is not None:
close_start = close_match.start()
else:
close_start = len(text)
if comment_match is not None:
comment_start = comment_match.start()
else:
comment_start = len(text)
string = quote_match.group(1)
#print("SEACH: q." + str(quote_start) + " o." + str(open_start) + " cl." + str(close_start) + " co." + str(comment_start))
if comment_start < quote_start and comment_start < open_start and comment_start < close_start:
string = comment_match.group()
text = text[comment_match.end() + 1:]
last_KVPart.add_KVComment(string)
progress_text(text, last_KVPart)
#no bracktes before next quote -> simply add to current KV_Part
elif quote_start < open_start and quote_start < close_start:
#check if its a value or key
if last_KVPart.is_empty() and not last_KVPart.get_name() == "master":
last_KVPart.add_simple_value(string)
new_KVPart = last_KVPart.get_parent()
else:
new_KVPart = last_KVPart.add_KVPart(string)
text = text[quote_match.end() + 1:]
progress_text(text, new_KVPart)
#closing bracket -> remove bracket and move to parent KV_Part
elif close_start < quote_start:
text = text[close_match.end() + 1:]
if last_KVPart.has_parent():
temp_KVPart = last_KVPart.get_parent()
else:
temp_KVPart = last_KVPart
progress_text(text, temp_KVPart)
#opening bracket -> creates a new child KV_Part
elif open_start < quote_start:
new_KVPart = last_KVPart.add_KVPart(string)
text = text[quote_match.end() + 1:]
progress_text(text, new_KVPart)
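# Rough usage sketch (the path is hypothetical): read_file() parses a KV
# text file into a KVPart tree rooted at "master", and printing a KVPart
# re-emits it in KV formatting via __str__:
#
#   master = read_file("npc/abilities/example_ability.txt")
#   print master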
| 3.03125 | 3 |
scripts/updatetestsuiterefimages.py | PaulDoessel/appleseed | 0 | 6759 | #!/usr/bin/python
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2014-2016 <NAME>, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
from __future__ import print_function
import argparse
import os
import shutil
#--------------------------------------------------------------------------------------------------
# Utility functions.
#--------------------------------------------------------------------------------------------------
def safe_mkdir(dir):
if not os.path.exists(dir):
os.mkdir(dir)
def walk(directory, recursive):
if recursive:
for dirpath, dirnames, filenames in os.walk(directory):
yield dirpath, dirnames, filenames
else:
        # the next() builtin keeps this working on both Python 2 and 3
        yield next(os.walk(directory))
#--------------------------------------------------------------------------------------------------
# Update reference images in a given test suite directory.
#--------------------------------------------------------------------------------------------------
def update_ref_images(parent_dir):
renders_dir = os.path.join(parent_dir, "renders")
ref_dir = os.path.join(parent_dir, "ref")
safe_mkdir(ref_dir)
for filename in os.listdir(renders_dir):
if os.path.splitext(filename)[1] == ".png":
src_path = os.path.join(renders_dir, filename)
dst_path = os.path.join(ref_dir, filename)
print(" copying {0} to {1}...".format(src_path, dst_path))
shutil.copyfile(src_path, dst_path)
#--------------------------------------------------------------------------------------------------
# Entry point.
#--------------------------------------------------------------------------------------------------
def main():
parser = argparse.ArgumentParser(description="update functional test suite reference images.")
parser.add_argument("-r", "--recursive", action='store_true', dest="recursive",
help="scan the specified directory and all its subdirectories")
parser.add_argument("directory", nargs='?', default=".", help="directory to scan")
args = parser.parse_args()
for dirpath, dirnames, filenames in walk(args.directory, args.recursive):
if "renders" in dirnames:
update_ref_images(dirpath)
if __name__ == '__main__':
main()
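# Example invocations (directory paths are illustrative):
#
#   python updatetestsuiterefimages.py tests/sandbox
#   python updatetestsuiterefimages.py --recursive tests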
| 1.414063 | 1 |
raidquaza/poll/polls.py | Breee/raidquaza | 2 | 6760 | from typing import List, Any
import time
from discord import Embed, Reaction
from utils import uniquify
# EMOJIS regional_indicator_A to regional_indicator_T
reaction_emojies = ['\U0001F1E6',
'\U0001F1E7',
'\U0001F1E8',
'\U0001F1E9',
'\U0001F1EA',
'\U0001F1EB',
'\U0001F1EC',
'\U0001F1ED',
'\U0001F1EE',
'\U0001F1EF',
'\U0001F1F0',
'\U0001F1F1',
'\U0001F1F2',
'\U0001F1F3',
'\U0001F1F4',
'\U0001F1F5',
'\U0001F1F6',
'\U0001F1F7',
'\U0001F1F8',
'\U0001F1F9']
number_emojies = {'rq_plus_one': 1, 'rq_plus_two': 2, 'rq_plus_three': 3, 'rq_plus_four': 4}
class PollCreationException(Exception):
pass
class Poll(object):
"""
A Poll object.
"""
def __init__(self, poll_id: str, poll_title: str, options: List[Any], is_immortal=False, updated_since_start=True):
if options is None:
options = []
self.poll_id = poll_id
self.creation_time = time.time()
self.last_update = time.time()
self.poll_title = poll_title
self.options = uniquify(options)
self.reaction_to_option = {reaction_emojies[k]: options[k] for k in range(len(options))}
self.option_to_reaction = {options[k]: reaction_emojies[k] for k in range(len(options))}
self.participants = dict()
self.option_to_participants = {key: [] for key in options}
self.sent_message = None
self.received_message = None
self.is_immortal = is_immortal
self.is_enabled = True
self.updated_since_start = updated_since_start
async def full_update(self, reactions: List[Reaction], bot_user_id: int):
if self.updated_since_start:
return
self.reaction_to_option = {reaction_emojies[k]: self.options[k] for k in range(len(self.options))}
self.option_to_reaction = {self.options[k]: reaction_emojies[k] for k in range(len(self.options))}
self.participants = dict()
self.option_to_participants = {key: [] for key in self.options}
for reaction in reactions:
async for user in reaction.users():
if bot_user_id != user.id:
self.process_reaction(reaction=reaction, user=user, add=True)
self.updated_since_start = True
def process_reaction(self, reaction, user, add):
# get users + reaction emoji
if hasattr(user, 'nick') and user.nick is not None:
nick = user.nick
else:
nick = user.display_name
if reaction.emoji in self.reaction_to_option:
# set list of users for the option the reaction belongs to.
option = self.reaction_to_option[reaction.emoji]
if add and nick not in self.option_to_participants[option]:
self.option_to_participants[option].append(nick)
elif not add:
self.option_to_participants[option].remove(nick)
if nick not in self.participants:
self.participants[nick] = 1
if hasattr(reaction.emoji, 'name') and reaction.emoji.name in number_emojies:
amount = number_emojies[reaction.emoji.name]
self.participants[nick] += (amount if add else -1 * amount)
def to_discord(self):
msg = f'Poll for **{self.poll_title}**'
embed = Embed(color=0xbb1c1c)
for option, participants in self.option_to_participants.items():
reaction = self.option_to_reaction[option]
name = f'{reaction} {option}'
value = ', '.join(
sorted([f'{x} [{self.participants[x]}]' for x in participants])) if participants else '-'
field_counters = [self.participants[x] for x in participants]
total = sum(field_counters)
embed.add_field(name=f'{name} [{total}]', value=value, inline=False)
embed.set_footer(text=f'ID: {self.poll_id}')
return msg, embed
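# Minimal construction sketch (id, title and option names are made up, and
# discord.py must be installed for the Embed): build a poll offline and
# render the (message, embed) pair that would be posted. Reaction counting
# needs a live client via full_update()/process_reaction().
if __name__ == '__main__':
    demo_poll = Poll('poll-001', 'Raikou raid at 18:00', ['Gym North', 'Gym South'])
    message, embed = demo_poll.to_discord()
    print(message)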
| 2.625 | 3 |
Python/Regex and Parsing/Validating and Parsing Email Addresses.py | pavstar619/HackerRank | 61 | 6761 | import email.utils as em
import re
class Main():
def __init__(self):
self.n = int(input())
for i in range(self.n):
self.s = em.parseaddr(input())
            if re.match(r'^[a-zA-Z](\w|-|\.|_)+@[a-zA-Z]+\.[a-zA-Z]{1,3}$', self.s[1]):
print(em.formataddr(self.s))
if __name__ == '__main__':
obj = Main()
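# Illustrative session in the problem's I/O format (first line is the count):
#   input : 2
#           DEXTER <dexter@hotmail.com>
#           VIRUS <virus!@variable.:p>
#   output: DEXTER <dexter@hotmail.com>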
| 3.171875 | 3 |
chatbot/train.py | codingsoo/virtaul_girlfriend | 4 | 6762 | <reponame>codingsoo/virtaul_girlfriend
# -*- coding: utf-8 -*-
import tensorflow as tf
import random
import math
import os
from config import FLAGS
from model import Seq2Seq
from dialog import Dialog
def train(dialog, batch_size=100, epoch=100):
model = Seq2Seq(dialog.vocab_size)
with tf.Session() as sess:
        # TODO: the session-restore and summary-logging logic here should move into the Seq2Seq model
ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
if ckpt and tf.train.checkpoint_exists(ckpt.model_checkpoint_path):
print "Reading model from following file: " + ckpt.model_checkpoint_path
model.saver.restore(sess, ckpt.model_checkpoint_path)
else:
print "Creating new model"
sess.run(tf.global_variables_initializer())
writer = tf.summary.FileWriter(FLAGS.log_dir, sess.graph)
total_batch = int(math.ceil(len(dialog.examples)/float(batch_size)))
for step in range(total_batch * epoch):
enc_input, dec_input, targets = dialog.next_batch(batch_size)
_, loss = model.train(sess, enc_input, dec_input, targets)
if (step + 1) % 100 == 0:
model.write_logs(sess, writer, enc_input, dec_input, targets)
print ('Step:', '%06d' % model.global_step.eval(),\
'cost =', '{:.6f}'.format(loss))
checkpoint_path = os.path.join(FLAGS.train_dir, FLAGS.ckpt_name)
model.saver.save(sess, checkpoint_path, global_step=model.global_step)
    print("Optimization complete!")
def test(dialog, batch_size=100):
print ("\n=== 예측 테스트 ===")
model = Seq2Seq(dialog.vocab_size)
with tf.Session() as sess:
ckpt = tf.train.get_checkpoint_state(FLAGS.train_dir)
print ("다음 파일에서 모델을 읽는 중 입니다..", ckpt.model_checkpoint_path)
model.saver.restore(sess, ckpt.model_checkpoint_path)
enc_input, dec_input, targets = dialog.next_batch(batch_size)
expect, outputs, accuracy = model.test(sess, enc_input, dec_input, targets)
expect = dialog.decode(expect)
outputs = dialog.decode(outputs)
        pick = random.randrange(0, len(expect) // 2)
input = dialog.decode([dialog.examples[pick * 2]], True)
expect = dialog.decode([dialog.examples[pick * 2 + 1]], True)
outputs = dialog.cut_eos(outputs[pick])
print ("\n정확도:", accuracy)
print ("랜덤 결과\n",)
print (" 입력값:", input)
print (" 실제값:", expect)
print (" 예측값:", ' '.join(outputs))
def main(_):
dialog = Dialog()
dialog.load_vocab(FLAGS.voc_path)
dialog.load_examples(FLAGS.data_path)
if FLAGS.train:
train(dialog, batch_size=FLAGS.batch_size, epoch=FLAGS.epoch)
elif FLAGS.test:
test(dialog, batch_size=FLAGS.batch_size)
if __name__ == "__main__":
tf.app.run()
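# Typical invocations, assuming the --train/--test booleans and the path
# flags are declared on FLAGS in config.py:
#   python train.py --train
#   python train.py --test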
| 2.421875 | 2 |
evaluation/dmp_behavior.py | rock-learning/approxik | 1 | 6763 | # Author: <NAME> <<EMAIL>>
import numpy as np
from bolero.representation import BlackBoxBehavior
from bolero.representation import DMPBehavior as DMPBehaviorImpl
class DMPBehavior(BlackBoxBehavior):
"""Dynamical Movement Primitive.
Parameters
----------
execution_time : float, optional (default: 1)
Execution time of the DMP in seconds.
dt : float, optional (default: 0.01)
Time between successive steps in seconds.
n_features : int, optional (default: 50)
Number of RBF features for each dimension of the DMP.
configuration_file : string, optional (default: None)
Name of a configuration file that should be used to initialize the DMP.
If it is set all other arguments will be ignored.
"""
def __init__(self, execution_time=1.0, dt=0.01, n_features=50,
configuration_file=None):
self.dmp = DMPBehaviorImpl(execution_time, dt, n_features,
configuration_file)
def init(self, n_inputs, n_outputs):
"""Initialize the behavior.
Parameters
----------
n_inputs : int
number of inputs
n_outputs : int
number of outputs
"""
self.dmp.init(3 * n_inputs, 3 * n_outputs)
self.n_joints = n_inputs
self.x = np.empty(3 * self.n_joints)
self.x[:] = np.nan
def reset(self):
self.dmp.reset()
self.x[:] = 0.0
def set_inputs(self, inputs):
self.x[:self.n_joints] = inputs[:]
def can_step(self):
return self.dmp.can_step()
def step(self):
self.dmp.set_inputs(self.x)
self.dmp.step()
self.dmp.get_outputs(self.x)
def get_outputs(self, outputs):
outputs[:] = self.x[:self.n_joints]
def get_n_params(self):
return self.dmp.get_n_params()
def get_params(self):
return self.dmp.get_params()
def set_params(self, params):
self.dmp.set_params(params)
def set_meta_parameters(self, keys, values):
self.dmp.set_meta_parameters(keys, values)
def trajectory(self):
return self.dmp.trajectory()
class DMPBehaviorWithGoalParams(DMPBehavior):
def __init__(self, goal, execution_time=1.0, dt=0.01, n_features=50,
configuration_file=None):
super(DMPBehaviorWithGoalParams, self).__init__(
execution_time, dt, n_features, configuration_file)
self.params = np.copy(goal)
def set_meta_parameters(self, keys, values):
self.dmp.set_meta_parameters(keys, values)
self.set_params(self.params)
def get_n_params(self):
return len(self.params)
def get_params(self):
return self.params
def set_params(self, params):
self.params[:] = params
self.dmp.set_meta_parameters(["g"], [self.params])
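# Minimal stepping sketch (joint count and DMP settings are arbitrary, and
# the bolero package used above must be installed): drive a two-joint DMP
# open-loop by feeding its outputs back as the next inputs.
if __name__ == "__main__":
    behavior = DMPBehavior(execution_time=1.0, dt=0.01, n_features=20)
    behavior.init(2, 2)
    behavior.reset()
    joints = np.zeros(2)
    while behavior.can_step():
        behavior.set_inputs(joints)
        behavior.step()
        behavior.get_outputs(joints)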
| 2.578125 | 3 |
logger.py | oxsoftdev/bitstampws-logger | 0 | 6764 | import logging.config
import tornado
from bitstampws import Client as Websocket
import lib.configs.logging
from lib.subscribers import SimpleLoggerSubscriber
logging.config.dictConfig(lib.configs.logging.d)
if __name__ == '__main__':
with Websocket() as client:
with SimpleLoggerSubscriber(client):
client.connect()
try:
tornado.ioloop.IOLoop.instance().start()
except KeyboardInterrupt:
client.close()
| 2 | 2 |
engine/tree.py | dougsc/gp | 0 | 6765 | import random
from pprint import pformat
from copy import deepcopy
from utils.logger import GP_Logger
from terminal_set import TerminalSet
class Tree:
@classmethod
def log(cls):
return GP_Logger.logger(cls.__name__)
def __init__(self):
self.terminal_set=None
self.function_set=None
self.function_bias=None
self.max_depth=None
self.tree = None
def clone(self, clone_tree):
assert clone_tree.tree != None, 'trying to clone from an uninitialized tree'
self.terminal_set = clone_tree.terminal_set
self.function_set = clone_tree.function_set
self.function_bias = clone_tree.function_bias
self.max_depth = clone_tree.max_depth
self.tree = deepcopy(clone_tree.tree)
def mutate(self, clone_tree):
self.clone(clone_tree)
mutation_node = random.choice(self.get_node_list())
self.log().debug('mutating at node %s - current depth: %d' % (mutation_node['node']['name'], mutation_node['depth']))
self._create_new_node(mutation_node['depth'], mutation_node)
self.log().debug('node mutated to %s' % (mutation_node['node']['name']))
self._add_layer(mutation_node)
def subtree_crossover(self, clone_tree, other_tree):
self.clone(clone_tree)
this_crossover_node = random.choice(self.get_node_list())
other_crossover_node = random.choice(other_tree.get_node_list())
self.log().debug('x-over node 1: %s (depth: %d), node 2: %s (depth: %d)' % (this_crossover_node['node']['name'],
this_crossover_node['depth'],
other_crossover_node['node']['name'],
other_crossover_node['depth']))
this_crossover_node['node'] = deepcopy(other_crossover_node['node'])
this_crossover_node['lower_nodes'] = deepcopy(other_crossover_node['lower_nodes'])
self.recalculate_depth(this_crossover_node['lower_nodes'], this_crossover_node['depth'] + 1)
def create(self, terminal_set=[], function_set=[], function_bias=1, max_depth=3):
self.terminal_set=terminal_set
self.function_set=function_set
self.function_bias=function_bias
self.max_depth=max_depth
self.tree = {}
self._create_new_node(1, self.tree)
self._add_layer(current_node=self.tree)
def _create_new_node(self, depth, node):
node_set = []
if depth == 1:
node_set = self.function_set
elif depth >= self.max_depth:
node_set = self.terminal_set
else:
node_set = self.function_set * self.function_bias + self.terminal_set
chosen_node = random.choice(node_set)
if not chosen_node.has_key('name'):
# this needs converting to a named node
value = chosen_node['function'](*chosen_node['args'])
chosen_node = TerminalSet.terminal_value(value)
node['node'] = chosen_node
node['lower_nodes'] = []
node['depth'] = depth
def _add_layer(self, current_node):
new_node_count = current_node['node'].has_key('arity') and current_node['node']['arity'] or 0
self.log().debug('adding %d nodes below %s - current depth = %d' % (new_node_count, current_node['node']['name'], current_node['depth']))
for i in range(new_node_count):
new_node = {}
self._create_new_node(current_node['depth'] + 1, new_node)
current_node['lower_nodes'].append(new_node)
map(lambda x:self._add_layer(x), current_node['lower_nodes'])
def dump(self):
print 'Tree: \n%s' % pformat(self.tree)
def _dump_structure(self, from_nodes, to_nodes):
for from_node in from_nodes:
new_node = {'name': from_node['node']['name'], 'lower_nodes': []}
to_nodes.append(new_node)
self._dump_structure(from_node['lower_nodes'], new_node['lower_nodes'])
def dump_structure(self):
structure = {'name': self.tree['node']['name'], 'lower_nodes': []}
self._dump_structure(self.tree['lower_nodes'], structure['lower_nodes'])
return structure
def execute_node(self, node, function_lookup, args=None):
assert node.has_key('value') or node.has_key('function'), 'node does not have a function or value'
value = None
if node.has_key('value'):
value = node['value']
else:
if args == None:
args = node['args']
if isinstance(node['function'], str):
value = function_lookup.get_func(node['function'])(*args)
else:
value = node['function'](*args)
return value
def get_lower_node_value(self, function_lookup, lower_node):
if lower_node['node']['node_type'] == 'terminal':
return self.execute_node(lower_node['node'], function_lookup)
else:
result_list = map(lambda x:self.get_lower_node_value(function_lookup, x), lower_node['lower_nodes'])
return self.execute_node(lower_node['node'], function_lookup, result_list)
def execute(self, function_lookup):
result_list = map(lambda x:self.get_lower_node_value(function_lookup, x), self.tree['lower_nodes'])
return self.execute_node(self.tree['node'], function_lookup, result_list)
def iterate_tree(self, nodes, callback):
for node in nodes:
callback(node)
self.iterate_tree(node['lower_nodes'], callback)
def recalculate_depth(self, nodes, depth):
for node in nodes:
node['depth'] = depth
self.recalculate_depth(node['lower_nodes'], depth+1)
def _get_node_list(self, nodes, node_list):
for node in nodes:
node_list.append(node)
self._get_node_list(node['lower_nodes'], node_list)
def get_node_list(self):
node_list = []
self._get_node_list(self.tree['lower_nodes'], node_list)
return node_list
def _simplify(self, node, function_lookup):
if len(node['lower_nodes']) == 0:
return
    # count the terminal-valued children so the arity comparison below works
    terminal_value_count = len(filter(lambda x:TerminalSet.is_terminal_value(x['node']), node['lower_nodes']))
if node['node']['arity'] == terminal_value_count:
value = self.execute_node(node, function_lookup, args=map(lambda x:x['node']['value'], node['lower_nodes']))
self.log().debug('Replacing existing node: %s' % pformat(node['node']))
node['lower_nodes'] = []
node['node'] = TerminalSet.terminal_value(value)
self.log().debug(' -- with node: %s' % pformat(node['node']))
self.is_simplified = False
else:
map(lambda x:self._simplify(x, function_lookup), node['lower_nodes'])
def simplify(self, function_lookup):
self.is_simplified = False
simplify_loop_count = 1
while not self.is_simplified:
self.log().debug('Simplification %d' % (simplify_loop_count))
self.is_simplified = True
self._simplify(self.tree, function_lookup)
simplify_loop_count += 1
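# Rough usage sketch (set contents are inferred from the node handling
# above; real terminal/function sets come from TerminalSet and a function
# set module, and function_lookup maps function names to callables):
#
#   tree = Tree()
#   tree.create(terminal_set=terminals, function_set=functions, max_depth=4)
#   result = tree.execute(function_lookup)
#   tree.simplify(function_lookup)  # folds constant subtrees in place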
| 2.5625 | 3 |
src/pyrin/packaging/__init__.py | wilsonGmn/pyrin | 0 | 6766 | <reponame>wilsonGmn/pyrin
# -*- coding: utf-8 -*-
"""
packaging package.
"""
from pyrin.packaging.base import Package
class PackagingPackage(Package):
"""
packaging package class.
"""
NAME = __name__
COMPONENT_NAME = 'packaging.component'
CONFIG_STORE_NAMES = ['packaging']
| 1.257813 | 1 |
chap7/heapq_merge.py | marble-git/python-laoqi | 0 | 6767 | <reponame>marble-git/python-laoqi
#coding:utf-8
'''
filename:heapq_merge.py
chap:7
subject:4-2
conditions:heapq.merge,sorted_list:lst1,lst2
lst3=merged_list(lst1,lst2) is sorted
solution:heapq.merge
'''
import heapq
lst1 = [1,3,5,7,9]
lst2 = [2,4,6,8]
if __name__ == '__main__':
lst3 = heapq.merge(lst1,lst2)
print('lst3',lst3)
print(list(lst3))
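# Note: heapq.merge() returns a lazy iterator, so the list() call above
# consumes lst3; iterating it a second time would yield nothing.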
| 2.90625 | 3 |
lib/googlecloudsdk/third_party/apis/serviceuser/v1/serviceuser_v1_client.py | kustodian/google-cloud-sdk | 0 | 6768 | """Generated client library for serviceuser version v1."""
# NOTE: This file is autogenerated and should not be edited by hand.
from apitools.base.py import base_api
from googlecloudsdk.third_party.apis.serviceuser.v1 import serviceuser_v1_messages as messages
class ServiceuserV1(base_api.BaseApiClient):
"""Generated client library for service serviceuser version v1."""
MESSAGES_MODULE = messages
BASE_URL = u'https://serviceuser.googleapis.com/'
_PACKAGE = u'serviceuser'
_SCOPES = [u'https://www.googleapis.com/auth/cloud-platform', u'https://www.googleapis.com/auth/cloud-platform.read-only', u'https://www.googleapis.com/auth/service.management']
_VERSION = u'v1'
_CLIENT_ID = '1042881264118.apps.googleusercontent.com'
_CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_CLIENT_CLASS_NAME = u'ServiceuserV1'
_URL_VERSION = u'v1'
_API_KEY = None
def __init__(self, url='', credentials=None,
get_credentials=True, http=None, model=None,
log_request=False, log_response=False,
credentials_args=None, default_global_params=None,
additional_http_headers=None, response_encoding=None):
"""Create a new serviceuser handle."""
url = url or self.BASE_URL
super(ServiceuserV1, self).__init__(
url, credentials=credentials,
get_credentials=get_credentials, http=http, model=model,
log_request=log_request, log_response=log_response,
credentials_args=credentials_args,
default_global_params=default_global_params,
additional_http_headers=additional_http_headers,
response_encoding=response_encoding)
self.projects_services = self.ProjectsServicesService(self)
self.projects = self.ProjectsService(self)
self.services = self.ServicesService(self)
class ProjectsServicesService(base_api.BaseApiService):
"""Service class for the projects_services resource."""
_NAME = u'projects_services'
def __init__(self, client):
super(ServiceuserV1.ProjectsServicesService, self).__init__(client)
self._upload_configs = {
}
def Disable(self, request, global_params=None):
r"""Disable a service so it can no longer be used with a.
project. This prevents unintended usage that may cause unexpected billing
charges or security leaks.
Operation<response: google.protobuf.Empty>
Args:
request: (ServiceuserProjectsServicesDisableRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Disable')
return self._RunMethod(
config, request, global_params=global_params)
Disable.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'serviceuser.projects.services.disable',
ordered_params=[u'projectsId', u'servicesId'],
path_params=[u'projectsId', u'servicesId'],
query_params=[],
relative_path=u'v1/projects/{projectsId}/services/{servicesId}:disable',
request_field=u'disableServiceRequest',
request_type_name=u'ServiceuserProjectsServicesDisableRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Enable(self, request, global_params=None):
r"""Enable a service so it can be used with a project.
See [Cloud Auth Guide](https://cloud.google.com/docs/authentication) for
more information.
Operation<response: google.protobuf.Empty>
Args:
request: (ServiceuserProjectsServicesEnableRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Enable')
return self._RunMethod(
config, request, global_params=global_params)
Enable.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'serviceuser.projects.services.enable',
ordered_params=[u'projectsId', u'servicesId'],
path_params=[u'projectsId', u'servicesId'],
query_params=[],
relative_path=u'v1/projects/{projectsId}/services/{servicesId}:enable',
request_field=u'enableServiceRequest',
request_type_name=u'ServiceuserProjectsServicesEnableRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""List enabled services for the specified consumer.
Args:
request: (ServiceuserProjectsServicesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListEnabledServicesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'serviceuser.projects.services.list',
ordered_params=[u'projectsId'],
path_params=[u'projectsId'],
query_params=[u'pageSize', u'pageToken'],
relative_path=u'v1/projects/{projectsId}/services',
request_field='',
request_type_name=u'ServiceuserProjectsServicesListRequest',
response_type_name=u'ListEnabledServicesResponse',
supports_download=False,
)
class ProjectsService(base_api.BaseApiService):
"""Service class for the projects resource."""
_NAME = u'projects'
def __init__(self, client):
super(ServiceuserV1.ProjectsService, self).__init__(client)
self._upload_configs = {
}
class ServicesService(base_api.BaseApiService):
"""Service class for the services resource."""
_NAME = u'services'
def __init__(self, client):
super(ServiceuserV1.ServicesService, self).__init__(client)
self._upload_configs = {
}
def Search(self, request, global_params=None):
r"""Search available services.
When no filter is specified, returns all accessible services. For
authenticated users, also returns all services the calling user has
"servicemanagement.services.bind" permission for.
Args:
request: (ServiceuserServicesSearchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SearchServicesResponse) The response message.
"""
config = self.GetMethodConfig('Search')
return self._RunMethod(
config, request, global_params=global_params)
Search.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'serviceuser.services.search',
ordered_params=[],
path_params=[],
query_params=[u'pageSize', u'pageToken'],
relative_path=u'v1/services:search',
request_field='',
request_type_name=u'ServiceuserServicesSearchRequest',
response_type_name=u'SearchServicesResponse',
supports_download=False,
)
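# Rough usage sketch (the request fields are illustrative; the message
# classes come from serviceuser_v1_messages, imported above as `messages`):
#
#   client = ServiceuserV1()
#   request = messages.ServiceuserServicesSearchRequest(pageSize=10)
#   response = client.services.Search(request)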
| 1.929688 | 2 |
bindings/python/examples/feature_example.py | lithathampan/wav2letter | 1 | 6769 | #!/usr/bin/env python3
# adapted from wav2letter/src/feature/test/MfccTest.cpp
import itertools as it
import os
import sys
from wav2letter.feature import FeatureParams, Mfcc
def load_data(filename):
path = os.path.join(data_path, filename)
path = os.path.abspath(path)
with open(path) as f:
return [float(x) for x in it.chain.from_iterable(line.split() for line in f)]
if __name__ == "__main__":
if len(sys.argv) != 2:
print(f"usage: {sys.argv[0]} feature_test_data_path", file=sys.stderr)
print(" (usually: <wav2letter_root>/src/feature/test/data)", file=sys.stderr)
sys.exit(1)
data_path = sys.argv[1]
wavinput = load_data("sa1.dat")
# golden features to compare
htkfeatures = load_data("sa1-mfcc.htk")
assert len(wavinput) > 0
assert len(htkfeatures) > 0
params = FeatureParams()
# define parameters of the featurization
params.sampling_freq = 16000
params.low_freq_filterbank = 0
params.high_freq_filterbank = 8000
params.num_filterbank_chans = 20
params.num_cepstral_coeffs = 13
params.use_energy = False
params.zero_mean_frame = False
params.use_power = False
# apply MFCC featurization
mfcc = Mfcc(params)
features = mfcc.apply(wavinput)
# check that obtained features are the same as golden one
assert len(features) == len(htkfeatures)
assert len(features) % 39 == 0
numframes = len(features) // 39
featurescopy = features.copy()
for f in range(numframes):
for i in range(1, 39):
features[f * 39 + i - 1] = features[f * 39 + i]
features[f * 39 + 12] = featurescopy[f * 39 + 0]
features[f * 39 + 25] = featurescopy[f * 39 + 13]
features[f * 39 + 38] = featurescopy[f * 39 + 26]
differences = [abs(x[0] - x[1]) for x in zip(features, htkfeatures)]
print(f"max_diff={max(differences)}")
print(f"avg_diff={sum(differences)/len(differences)}")
| 2.4375 | 2 |
app.py | shreyashack/PY_Message_Decryption | 1 | 6770 | <gh_stars>1-10
from tkinter import *
import onetimepad
class Message_Decrypt:
def __init__(self,root):
self.root=root
self.root.title("Message Decryption")
self.root.geometry("400x475")
self.root.iconbitmap("logo368.ico")
self.root.resizable(0,0)
def on_enter1(e):
but_decrypt['background']="black"
but_decrypt['foreground']="cyan"
def on_leave1(e):
but_decrypt['background']="SystemButtonFace"
but_decrypt['foreground']="SystemButtonText"
def on_enter2(e):
but_clear['background']="black"
but_clear['foreground']="cyan"
def on_leave2(e):
but_clear['background']="SystemButtonFace"
but_clear['foreground']="SystemButtonText"
def clear():
text_decrypt.delete('1.0',"end")
text_decrypt_output.delete('1.0',"end")
def decrypt():
try:
s=text_decrypt.get('1.0','end')
b=s.strip()
x=onetimepad.decrypt(b,'random')
text_decrypt_output.insert('end',x)
except Exception as e:
print(e)
#===========frame==================================#
mainframe=Frame(self.root,width=400,height=475,relief="ridge",bd=4)
mainframe.place(x=0,y=0)
firstframe=Frame(mainframe,width=393,height=207,relief="ridge",bd=4)
firstframe.place(x=0,y=0)
secondframe=Frame(mainframe,width=393,height=207,relief="ridge",bd=4)
secondframe.place(x=0,y=207)
thirdframe=Frame(mainframe,width=393,height=52,relief="ridge",bd=4,bg="gray77")
thirdframe.place(x=0,y=415)
#===================firstframe==============================#
scol=Scrollbar(firstframe,orient="vertical")
scol.place(relx=1, rely=0, relheight=1, anchor='ne')
text_decrypt=Text(firstframe,height=10,width=45,font=('times new roman',12),yscrollcommand=scol.set,relief="sunken",bd=3,fg="black")
text_decrypt.place(x=0,y=0)
scol.config(command=text_decrypt.yview)
#====================secondframe============================#
scol=Scrollbar(secondframe,orient="vertical")
scol.place(relx=1, rely=0, relheight=1, anchor='ne')
text_decrypt_output=Text(secondframe,height=10,width=45,font=('times new roman',12),yscrollcommand=scol.set,relief="sunken",bd=3,fg="black")
text_decrypt_output.place(x=0,y=0)
scol.config(command=text_decrypt_output.yview)
#==================third====================================#
but_decrypt=Button(thirdframe,text="Decrypt",width=13,font=('times new roman',14),cursor="hand2",command=decrypt)
but_decrypt.place(x=20,y=3)
but_decrypt.bind("<Enter>",on_enter1)
but_decrypt.bind("<Leave>",on_leave1)
but_clear=Button(thirdframe,text="Clear",width=13,font=('times new roman',14),cursor="hand2",command=clear)
but_clear.place(x=235,y=3)
but_clear.bind("<Enter>",on_enter2)
but_clear.bind("<Leave>",on_leave2)
if __name__ == "__main__":
root=Tk()
Message_Decrypt(root)
root.mainloop()
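# The decryption key is hard-coded as 'random' in decrypt() above, so a
# matching ciphertext can be produced with the same library, e.g.:
#   onetimepad.encrypt('hello world', 'random')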
| 2.96875 | 3 |
examples/rrbot_p2p_low_energy.py | abcamiletto/urdf2optcontrol | 0 | 6771 | #!/usr/bin/env python3
from urdf2optcontrol import optimizer
from matplotlib import pyplot as plt
import pathlib
# URDF options
urdf_path = pathlib.Path(__file__).parent.joinpath('urdf', 'rrbot.urdf').absolute()
root = "link1"
end = "link3"
in_cond = [0] * 4
def my_cost_func(q, qd, qdd, ee_pos, u, t):
return u.T @ u
def my_constraint1(q, qd, qdd, ee_pos, u, t):
return [-30, -30], u, [30, 30]
def my_constraint2(q, qd, qdd, ee_pos, u, t):
return [-4, -4], qd, [4, 4]
my_constraints = [my_constraint1, my_constraint2]
def my_final_constraint1(q, qd, qdd, ee_pos, u):
return [3.14 / 2, 0], q, [3.14 / 2, 0]
def my_final_constraint2(q, qd, qdd, ee_pos, u):
return [0, 0], qd, [0, 0]
my_final_constraints = [my_final_constraint1, my_final_constraint2]
time_horizon = 2.0
steps = 40
# Load the urdf and calculate the differential equations
optimizer.load_robot(urdf_path, root, end)
# Loading the problem conditions
optimizer.load_problem(
my_cost_func,
steps,
in_cond,
time_horizon=time_horizon,
constraints=my_constraints,
final_constraints=my_final_constraints,
max_iter=500
)
# Solving the nonlinear problem
res = optimizer.solve()
print('u = ', res['u'][0])
print('q = ', res['q'][0])
# Print the results!
fig = optimizer.plot_result(show=True)
| 2.71875 | 3 |
SocketServer/apps/django-db-pool-master/dbpool/db/backends/postgresql_psycopg2/base.py | fqc/SocketSample_Mina_Socket | 23 | 6772 | """
Pooled PostgreSQL database backend for Django.
Requires psycopg 2: http://initd.org/projects/psycopg2
"""
from django import get_version as get_django_version
from django.db.backends.postgresql_psycopg2.base import \
DatabaseWrapper as OriginalDatabaseWrapper
from django.db.backends.signals import connection_created
from threading import Lock
import logging
import sys
try:
import psycopg2 as Database
import psycopg2.extensions
except ImportError, e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e)
logger = logging.getLogger(__name__)
class PooledConnection():
'''
Thin wrapper around a psycopg2 connection to handle connection pooling.
'''
def __init__(self, pool, test_query=None):
self._pool = pool
# If passed a test query we'll run it to ensure the connection is available
if test_query:
self._wrapped_connection = None
num_attempts = 0
while self._wrapped_connection is None:
                num_attempts += 1
c = pool.getconn()
try:
c.cursor().execute(test_query)
except Database.Error:
pool.putconn(c, close=True)
if num_attempts > self._pool.maxconn:
logger.error("Unable to check out connection from pool %s" % self._pool)
                        raise
else:
logger.info("Closing dead connection from pool %s" % self._pool,
exc_info=sys.exc_info())
else:
if not c.autocommit:
c.rollback()
self._wrapped_connection = c
else:
self._wrapped_connection = pool.getconn()
logger.debug("Checked out connection %s from pool %s" % (self._wrapped_connection, self._pool))
def close(self):
'''
Override to return the connection to the pool rather than closing it.
'''
if self._wrapped_connection and self._pool:
logger.debug("Returning connection %s to pool %s" % (self._wrapped_connection, self._pool))
self._pool.putconn(self._wrapped_connection)
self._wrapped_connection = None
def __getattr__(self, attr):
'''
All other calls proxy through to the "real" connection
'''
return getattr(self._wrapped_connection, attr)
'''
This holds our connection pool instances (for each alias in settings.DATABASES that
uses our PooledDatabaseWrapper.)
'''
connection_pools = {}
connection_pools_lock = Lock()
pool_config_defaults = {
'MIN_CONNS': None,
'MAX_CONNS': 1,
'TEST_ON_BORROW': False,
'TEST_ON_BORROW_QUERY': 'SELECT 1'
}
def _set_up_pool_config(self):
'''
Helper to configure pool options during DatabaseWrapper initialization.
'''
self._max_conns = self.settings_dict['OPTIONS'].get('MAX_CONNS', pool_config_defaults['MAX_CONNS'])
self._min_conns = self.settings_dict['OPTIONS'].get('MIN_CONNS', self._max_conns)
self._test_on_borrow = self.settings_dict["OPTIONS"].get('TEST_ON_BORROW',
pool_config_defaults['TEST_ON_BORROW'])
if self._test_on_borrow:
self._test_on_borrow_query = self.settings_dict["OPTIONS"].get('TEST_ON_BORROW_QUERY',
pool_config_defaults['TEST_ON_BORROW_QUERY'])
else:
self._test_on_borrow_query = None
def _create_connection_pool(self, conn_params):
'''
Helper to initialize the connection pool.
'''
connection_pools_lock.acquire()
try:
# One more read to prevent a read/write race condition (We do this
# here to avoid the overhead of locking each time we get a connection.)
if (self.alias not in connection_pools or
connection_pools[self.alias]['settings'] != self.settings_dict):
logger.info("Creating connection pool for db alias %s" % self.alias)
logger.info(" using MIN_CONNS = %s, MAX_CONNS = %s, TEST_ON_BORROW = %s" % (self._min_conns,
self._max_conns,
self._test_on_borrow))
from psycopg2 import pool
connection_pools[self.alias] = {
'pool': pool.ThreadedConnectionPool(self._min_conns, self._max_conns, **conn_params),
'settings': dict(self.settings_dict),
}
finally:
connection_pools_lock.release()
'''
Simple Postgres pooled connection that uses psycopg2's built-in ThreadedConnectionPool
implementation. In Django, use this by specifying MAX_CONNS and (optionally) MIN_CONNS
in the OPTIONS dictionary for the given db entry in settings.DATABASES.
MAX_CONNS should be equal to the maximum number of threads your app server is configured
for. For example, if you are running Gunicorn or Apache/mod_wsgi (in a multiple *process*
configuration) MAX_CONNS should be set to 1, since you'll have a dedicated python
interpreter per process/worker. If you're running Apache/mod_wsgi in a multiple *thread*
configuration set MAX_CONNS to the number of threads you have configured for each process.
By default MIN_CONNS will be set to MAX_CONNS, which prevents connections from being closed.
If your load is spikey and you want to recycle connections, set MIN_CONNS to something lower
than MAX_CONNS. I suggest it should be no lower than your 95th percentile concurrency for
your app server.
If you wish to validate connections on each check out, specify TEST_ON_BORROW (set to True)
in the OPTIONS dictionary for the given db entry. You can also provide an optional
TEST_ON_BORROW_QUERY, which is "SELECT 1" by default.
'''
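# A minimal sketch of a settings.DATABASES entry for this backend (the
# database name/credentials are hypothetical placeholders, and the ENGINE
# path assumes the package is importable as ``dbpool``):
#
# DATABASES = {
#     'default': {
#         'ENGINE': 'dbpool.db.backends.postgresql_psycopg2',
#         'NAME': 'mydb',
#         'USER': 'myuser',
#         'PASSWORD': 'secret',
#         'HOST': 'localhost',
#         'PORT': '5432',
#         'OPTIONS': {
#             'MAX_CONNS': 4,
#             'MIN_CONNS': 2,
#             'TEST_ON_BORROW': True,
#         },
#     },
# }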
class DatabaseWrapper16(OriginalDatabaseWrapper):
'''
For Django 1.6.x
TODO: See https://github.com/django/django/commit/1893467784deb6cd8a493997e8bac933cc2e4af9
but more importantly https://github.com/django/django/commit/2ee21d9f0d9eaed0494f3b9cd4b5bc9beffffae5
This code may be no longer needed!
'''
set_up_pool_config = _set_up_pool_config
create_connection_pool = _create_connection_pool
def __init__(self, *args, **kwargs):
super(DatabaseWrapper16, self).__init__(*args, **kwargs)
self.set_up_pool_config()
def get_new_connection(self, conn_params):
# Is this the initial use of the global connection_pools dictionary for
# this python interpreter? Build a ThreadedConnectionPool instance and
# add it to the dictionary if so.
if self.alias not in connection_pools or connection_pools[self.alias]['settings'] != self.settings_dict:
for extra in pool_config_defaults.keys():
if extra in conn_params:
del conn_params[extra]
self.create_connection_pool(conn_params)
return PooledConnection(connection_pools[self.alias]['pool'], test_query=self._test_on_borrow_query)
class DatabaseWrapper14and15(OriginalDatabaseWrapper):
'''
For Django 1.4.x and 1.5.x
'''
set_up_pool_config = _set_up_pool_config
create_connection_pool = _create_connection_pool
def __init__(self, *args, **kwargs):
super(DatabaseWrapper14and15, self).__init__(*args, **kwargs)
self.set_up_pool_config()
def _cursor(self):
settings_dict = self.settings_dict
if self.connection is None or connection_pools[self.alias]['settings'] != settings_dict:
# Is this the initial use of the global connection_pools dictionary for
# this python interpreter? Build a ThreadedConnectionPool instance and
# add it to the dictionary if so.
if self.alias not in connection_pools or connection_pools[self.alias]['settings'] != settings_dict:
if not settings_dict['NAME']:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured(
"settings.DATABASES is improperly configured. "
"Please supply the NAME value.")
conn_params = {
'database': settings_dict['NAME'],
}
conn_params.update(settings_dict['OPTIONS'])
for extra in ['autocommit'] + pool_config_defaults.keys():
if extra in conn_params:
del conn_params[extra]
if settings_dict['USER']:
conn_params['user'] = settings_dict['USER']
if settings_dict['PASSWORD']:
conn_params['password'] = force_str(settings_dict['PASSWORD'])
if settings_dict['HOST']:
conn_params['host'] = settings_dict['HOST']
if settings_dict['PORT']:
conn_params['port'] = settings_dict['PORT']
self.create_connection_pool(conn_params)
self.connection = PooledConnection(connection_pools[self.alias]['pool'],
test_query=self._test_on_borrow_query)
self.connection.set_client_encoding('UTF8')
tz = 'UTC' if settings.USE_TZ else settings_dict.get('TIME_ZONE')
if tz:
try:
get_parameter_status = self.connection.get_parameter_status
except AttributeError:
# psycopg2 < 2.0.12 doesn't have get_parameter_status
conn_tz = None
else:
conn_tz = get_parameter_status('TimeZone')
if conn_tz != tz:
# Set the time zone in autocommit mode (see #17062)
self.connection.set_isolation_level(
psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
self.connection.cursor().execute(
self.ops.set_time_zone_sql(), [tz])
self.connection.set_isolation_level(self.isolation_level)
self._get_pg_version()
connection_created.send(sender=self.__class__, connection=self)
cursor = self.connection.cursor()
cursor.tzinfo_factory = utc_tzinfo_factory if settings.USE_TZ else None
return CursorWrapper(cursor)
class DatabaseWrapper13(OriginalDatabaseWrapper):
'''
For Django 1.3.x
'''
set_up_pool_config = _set_up_pool_config
create_connection_pool = _create_connection_pool
def __init__(self, *args, **kwargs):
super(DatabaseWrapper13, self).__init__(*args, **kwargs)
self.set_up_pool_config()
def _cursor(self):
'''
Override _cursor to plug in our connection pool code. We'll return a wrapped Connection
which can handle returning itself to the pool when its .close() method is called.
'''
from django.db.backends.postgresql.version import get_version
new_connection = False
set_tz = False
settings_dict = self.settings_dict
if self.connection is None or connection_pools[self.alias]['settings'] != settings_dict:
new_connection = True
set_tz = settings_dict.get('TIME_ZONE')
# Is this the initial use of the global connection_pools dictionary for
# this python interpreter? Build a ThreadedConnectionPool instance and
# add it to the dictionary if so.
if self.alias not in connection_pools or connection_pools[self.alias]['settings'] != settings_dict:
if settings_dict['NAME'] == '':
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("You need to specify NAME in your Django settings file.")
conn_params = {
'database': settings_dict['NAME'],
}
conn_params.update(settings_dict['OPTIONS'])
for extra in ['autocommit'] + pool_config_defaults.keys():
if extra in conn_params:
del conn_params[extra]
if settings_dict['USER']:
conn_params['user'] = settings_dict['USER']
if settings_dict['PASSWORD']:
conn_params['password'] = settings_dict['PASSWORD']
if settings_dict['HOST']:
conn_params['host'] = settings_dict['HOST']
if settings_dict['PORT']:
conn_params['port'] = settings_dict['PORT']
self.create_connection_pool(conn_params)
self.connection = PooledConnection(connection_pools[self.alias]['pool'],
test_query=self._test_on_borrow_query)
self.connection.set_client_encoding('UTF8')
self.connection.set_isolation_level(self.isolation_level)
# We'll continue to emulate the old signal frequency in case any code depends upon it
connection_created.send(sender=self.__class__, connection=self)
cursor = self.connection.cursor()
cursor.tzinfo_factory = None
if new_connection:
if set_tz:
cursor.execute("SET TIME ZONE %s", [settings_dict['TIME_ZONE']])
if not hasattr(self, '_version'):
self.__class__._version = get_version(cursor)
if self._version[0:2] < (8, 0):
# No savepoint support for earlier version of PostgreSQL.
self.features.uses_savepoints = False
if self.features.uses_autocommit:
if self._version[0:2] < (8, 2):
# FIXME: Needs extra code to do reliable model insert
# handling, so we forbid it for now.
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("You cannot use autocommit=True with PostgreSQL prior to 8.2 at the moment.")
else:
# FIXME: Eventually we're enable this by default for
# versions that support it, but, right now, that's hard to
# do without breaking other things (#10509).
self.features.can_return_id_from_insert = True
return CursorWrapper(cursor)
'''
Choose a version of the DatabaseWrapper class to use based on the Django
version. This is a bit hacky, what's a more elegant way?
'''
django_version = get_django_version()
if django_version.startswith('1.3'):
from django.db.backends.postgresql_psycopg2.base import CursorWrapper
class DatabaseWrapper(DatabaseWrapper13):
pass
elif django_version.startswith('1.4') or django_version.startswith('1.5'):
from django.conf import settings
from django.db.backends.postgresql_psycopg2.base import utc_tzinfo_factory, \
CursorWrapper
# The force_str call around the password seems to be the only change from
# 1.4 to 1.5, so we'll use the same DatabaseWrapper class and make
# force_str a no-op.
try:
from django.utils.encoding import force_str
except ImportError:
force_str = lambda x: x
class DatabaseWrapper(DatabaseWrapper14and15):
pass
elif django_version.startswith('1.6'):
class DatabaseWrapper(DatabaseWrapper16):
pass
else:
raise ImportError("Unsupported Django version %s" % django_version)
| 2.671875 | 3 |
backend/tests/test_api/test_api_auth.py | abodacs/fastapi-ml-skeleton | 0 | 6773 | <gh_stars>0
# Skeleton
from fastapi_skeleton.core import messages
def test_auth_using_prediction_api_no_apikey_header(test_client) -> None:
response = test_client.post("/api/model/predict")
assert response.status_code == 400
assert response.json() == {"detail": messages.NO_API_KEY}
def test_auth_using_prediction_api_wrong_apikey_header(test_client) -> None:
response = test_client.post(
"/api/model/predict", json={"image": "test"}, headers={"token": "WRONG_TOKEN"}
)
assert response.status_code == 401
assert response.json() == {"detail": messages.AUTH_REQ}
| 2.109375 | 2 |
docker/cleanup_generators.py | hashnfv/hashnfv-nfvbench | 0 | 6774 | <filename>docker/cleanup_generators.py
# Copyright 2016 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import shutil
TREX_OPT = '/opt/trex'
TREX_UNUSED = [
'_t-rex-64-debug', '_t-rex-64-debug-o', 'bp-sim-64', 'bp-sim-64-debug',
't-rex-64-debug', 't-rex-64-debug-o', 'automation/__init__.py',
'automation/graph_template.html',
'automation/config', 'automation/h_avc.py', 'automation/phantom',
'automation/readme.txt', 'automation/regression', 'automation/report_template.html',
'automation/sshpass.exp', 'automation/trex_perf.py', 'wkhtmltopdf-amd64'
]
def remove_unused_libs(path, files):
"""
Remove files not used by traffic generator.
"""
for f in files:
f = os.path.join(path, f)
try:
if os.path.isdir(f):
shutil.rmtree(f)
else:
os.remove(f)
except OSError:
print "Skipped file:"
print f
continue
def get_dir_size(start_path='.'):
"""
Computes size of directory.
    :return: size of directory with subdirectories
"""
total_size = 0
for dirpath, dirnames, filenames in os.walk(start_path):
for f in filenames:
try:
fp = os.path.join(dirpath, f)
total_size += os.path.getsize(fp)
except OSError:
continue
return total_size
if __name__ == "__main__":
versions = os.listdir(TREX_OPT)
for version in versions:
trex_path = os.path.join(TREX_OPT, version)
print 'Cleaning TRex', version
try:
size_before = get_dir_size(trex_path)
remove_unused_libs(trex_path, TREX_UNUSED)
size_after = get_dir_size(trex_path)
print '==== Saved Space ===='
print size_before - size_after
except OSError:
import traceback
print traceback.print_exc()
print 'Cleanup was not finished.'
| 2.09375 | 2 |
object_detection/box_coders/mean_stddev_box_coder.py | ophirSarusi/TF_Object_Detection | 59 | 6775 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Mean stddev box coder.
This box coder uses the following coding schema to encode boxes:
rel_code = (box_corner - anchor_corner_mean) / anchor_corner_stddev.
"""
from object_detection.core import box_coder
from object_detection.core import box_list
class MeanStddevBoxCoder(box_coder.BoxCoder):
"""Mean stddev box coder."""
@property
def code_size(self):
return 4
def _encode(self, boxes, anchors):
"""Encode a box collection with respect to anchor collection.
Args:
boxes: BoxList holding N boxes to be encoded.
anchors: BoxList of N anchors. We assume that anchors has an associated
stddev field.
Returns:
a tensor representing N anchor-encoded boxes
Raises:
ValueError: if the anchors BoxList does not have a stddev field
"""
if not anchors.has_field('stddev'):
raise ValueError('anchors must have a stddev field')
box_corners = boxes.get()
means = anchors.get()
stddev = anchors.get_field('stddev')
return (box_corners - means) / stddev
def _decode(self, rel_codes, anchors):
"""Decode.
Args:
rel_codes: a tensor representing N anchor-encoded boxes.
anchors: BoxList of anchors. We assume that anchors has an associated
stddev field.
Returns:
boxes: BoxList holding N bounding boxes
Raises:
ValueError: if the anchors BoxList does not have a stddev field
"""
if not anchors.has_field('stddev'):
raise ValueError('anchors must have a stddev field')
means = anchors.get()
stddevs = anchors.get_field('stddev')
box_corners = rel_codes * stddevs + means
return box_list.BoxList(box_corners)
| 2.65625 | 3 |
storage/aug_buffer.py | nsortur/equi_rl | 9 | 6776 | <filename>storage/aug_buffer.py
from storage.buffer import QLearningBuffer
from utils.torch_utils import ExpertTransition, augmentTransition
from utils.parameters import buffer_aug_type
class QLearningBufferAug(QLearningBuffer):
def __init__(self, size, aug_n=9):
super().__init__(size)
self.aug_n = aug_n
def add(self, transition: ExpertTransition):
super().add(transition)
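        # Also store aug_n augmented copies, so each experience contributes
        # (1 + aug_n) samples to the buffer.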
for _ in range(self.aug_n):
super().add(augmentTransition(transition, buffer_aug_type))
| 2.21875 | 2 |
hlrl/torch/agents/wrappers/agent.py | Chainso/HLRL | 0 | 6777 | <reponame>Chainso/HLRL<filename>hlrl/torch/agents/wrappers/agent.py<gh_stars>0
import torch
from typing import Any, Dict, List, OrderedDict, Tuple
from hlrl.core.agents import RLAgent
from hlrl.core.common.wrappers import MethodWrapper
class TorchRLAgent(MethodWrapper):
"""
A torch agent that wraps its experiences as torch tensors.
"""
def __init__(self,
agent: RLAgent,
batch_state: bool = True):
"""
Creates torch agent that can wrap experiences as tensors.
Args:
agent: The agent to wrap.
batch_state: If the state should be batched with a batch size of 1
when transformed.
"""
super().__init__(agent)
self.batch_state = batch_state
def make_tensor(self, data):
"""
Creates a float tensor of the data of batch size 1.
"""
if self.batch_state:
data = [data]
return torch.FloatTensor(data).to(self.algo.device)
def transform_state(self, state):
state_dict = self.om.transform_state(state)
state_dict["state"] = self.make_tensor(state_dict["state"])
return state_dict
def transform_reward(
self,
state: Any,
algo_step: OrderedDict[str, Any],
reward: Any,
terminal: Any,
next_state: Any
) -> Any:
"""
Creates a tensor from the reward.
Args:
state: The state of the environment.
algo_step: The transformed algorithm step of the state.
reward: The reward from the environment.
terminal: If the next state is a terminal state.
next_state: The new state of the environment.
Returns:
The reward as a tensor.
"""
reward = self.om.transform_reward(
state, algo_step, reward, terminal, next_state
)
if self.batch_state:
reward = [reward]
return self.make_tensor(reward)
def transform_terminal(self, terminal: Any, info: Any) -> Any:
"""
Transforms the terminal of an environment step.
Args:
terminal: The terminal value to transform.
info: Additional environment information for the step.
Returns:
The transformed terminal.
"""
terminal = self.om.transform_terminal(terminal, info)
if self.batch_state:
terminal = [terminal]
return self.make_tensor(terminal)
def transform_action(self, action):
return self.om.transform_action(action).squeeze().cpu().numpy()
def reward_to_float(self, reward: torch.Tensor) -> float:
"""
Converts the reward to a single float value.
Args:
reward: The reward to turn into a float.
Returns:
The float value of the reward tensor.
"""
reward = reward[0].detach().cpu()
reward = reward.item()
return reward
def create_batch(
self,
ready_experiences: Dict[str, List[Any]],
) -> Dict[str, torch.Tensor]:
"""
Creates a batch of experiences to be trained on from the ready
experiences.
Args:
ready_experiences: The experiences to be trained on.
Returns:
A dictionary of each field necessary for training.
"""
batch = {
key: torch.cat(ready_experiences[key]) for key in ready_experiences
}
return self.om.create_batch(batch)
| 2.28125 | 2 |
PixivConstant.py | NHOrus/PixivUtil2 | 0 | 6778 | <reponame>NHOrus/PixivUtil2
# -*- coding: utf-8 -*-
PIXIVUTIL_VERSION = '20191220-beta1'
PIXIVUTIL_LINK = 'https://github.com/Nandaka/PixivUtil2/releases'
PIXIVUTIL_DONATE = 'https://bit.ly/PixivUtilDonation'
# Log Settings
PIXIVUTIL_LOG_FILE = 'pixivutil.log'
PIXIVUTIL_LOG_SIZE = 10485760
PIXIVUTIL_LOG_COUNT = 10
PIXIVUTIL_LOG_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
# Download Results
PIXIVUTIL_NOT_OK = -1
PIXIVUTIL_OK = 0
PIXIVUTIL_SKIP_OLDER = 1
PIXIVUTIL_SKIP_BLACKLIST = 2
PIXIVUTIL_KEYBOARD_INTERRUPT = 3
PIXIVUTIL_SKIP_DUPLICATE = 4
PIXIVUTIL_SKIP_LOCAL_LARGER = 5
PIXIVUTIL_CHECK_DOWNLOAD = 6
PIXIVUTIL_ABORTED = 9999
BUFFER_SIZE = 8192
| 1.101563 | 1 |
dynamic_schemas/views.py | Threemusketeerz/DSystems | 1 | 6779 | from django.http import Http404
from django.shortcuts import render, redirect, reverse
from django.views.generic import ListView
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.renderers import TemplateHTMLRenderer
from .models import Schema, SchemaColumn, SchemaResponse, SchemaUrl
from .forms import SchemaResponseForm, ResponseUpdateForm
from .serializers import SchemaResponseSerializer
from .prepare_data import getcolumns
import pytz
class SchemaIndexView(LoginRequiredMixin, ListView):
# login_url = '/accounts/login.html/'
template_name = 'dynamic_schemas/index.html'
context_object_name = 'all_schemas'
def get_queryset(self):
return Schema.objects.all()
@login_required
def form_view(request, pk):
schema = Schema.objects.get(pk=pk)
urls = schema.help_field.all()
if request.method == 'POST':
form = SchemaResponseForm(schema, request.POST)
if form.is_valid():
instance = form.save(commit=False)
instance.user = request.user
instance.save()
return redirect(reverse('dynamic_schemas:schema_view',
kwargs={'pk': pk}))
else:
form = SchemaResponseForm(schema)
return render(request, f'dynamic_schemas/create-form.html', \
{
'form': form,
'schema': schema,
'help_urls': urls,
})
@login_required
def form_update_view(request, pk, r_pk):
schema = Schema.objects.get(pk=pk)
instance = SchemaResponse.objects.get(schema=schema, pk=r_pk)
columns = SchemaColumn.objects.filter(schema=schema)
###################################################
# This little snippet checks if the responses can be edited. If they can
# the submit button will be provided. There is no restriction on
    # has_been_edited, but since the data can't be saved we're good for now.
load_button = False
aggr_editables = [c.is_editable_once for c in columns]
if True in aggr_editables:
load_button = True
###################################################
form = ResponseUpdateForm(instance, pk)
if request.method == 'POST':
form = ResponseUpdateForm(instance, pk, request.POST or None)
if form.is_valid():
form.update()
return redirect(reverse('dynamic_schemas:schema_view',
kwargs={'pk': pk}))
return render(request, f'dynamic_schemas/update-form.html',
{'form_update': form,
'load_button': load_button}
)
""" API Views """
class MakeDataPrettyMixin:
def _make_date_tz(self, instance=None, tz=None):
""" Takes an instance, and sets its timezone.
TODO:
Should this be a classmethod? Will a classmethod complicate the
view in its context?
"""
# Can this be moved to SETTINGS instead? Same for _make_date_readable.
# Problem is probably that the UTC format gets overridden.
        if instance:
            if tz:
                tz = pytz.timezone(tz)
                return instance.pub_date.astimezone(tz)
            # No timezone given: return the stored time unchanged instead of None.
            return instance.pub_date
        return
def _make_date_readable(self, instances):
"""
Helper function to change the dates to a format pleasing to the
eyes, takes a bundle of instances and converts their time.
How extensible do we want this?
Function is kept private for now, since in Denmark the timezone is CET.
"""
for instance in instances:
inst_as_cet = self._make_date_tz(
instance=instance
# tz='Europe/Copenhagen'
)
instance.pub_date = inst_as_cet \
.strftime('%d-%m/%Y %H:%M:%S')
return instances
def _make_user_readable(self, serializer):
""" Helper to return the correct attributes to the front-end
"""
for data in serializer.data:
# import ipdb; ipdb.set_trace()
user = data['user']
instance = User.objects.get(id=user)
user = instance.first_name + instance.last_name
if instance.first_name == '':
user = instance.username
data['user'] = user
# __import__('ipdb').set_trace()
# import ipdb; ipdb.set_trace()
return serializer
    def _make_instruction_links_readable(self, serializer):
for data in serializer.data:
instr = data['instruction']
instance = SchemaUrl.objects.get(id=instr)
instr = '<a href="'+ instance.url +'">'+ instance.name +'</a>'
data['instruction'] = instr
return serializer
class ResponseList(MakeDataPrettyMixin, APIView):
"""
Lists responses according to schema.
    Purely an APIView for now. Not being used in the actual rendering of the
    tables.
"""
default_order = [
('desc', '-'),
('asc', ''),
]
def get_orderprefix(self, order):
for tup in self.default_order:
if order in tup:
return tup[1]
def get(self, request, pk, format=None, *args):
req = request.GET
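        # The parameters below follow the jQuery DataTables server-side
        # processing protocol (draw/start/length/order/columns); the response
        # echoes draw along with recordsTotal/recordsFiltered counts.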
# Amount of data to fetch each pull
start = int(req.get('start', 0))
length = int(req.get('length', 30))
        end = start + length
order = req.get('order[0][dir]')
order_column = req.get('order[0][column]')
order_by_pre = self.get_orderprefix(order)
order_column_name = req.get('columns['+order_column+'][data]')
# __import__('ipdb').set_trace()
order_str = order_by_pre + order_column_name
draw = req.get('draw')
        # TODO Gonna require some thinking. Also need to use recordsFiltered.
# search = req.get('search[value]')
schema = Schema.objects.get(pk=pk)
responses_count = SchemaResponse.objects.filter(schema=schema).count()
responses = SchemaResponse \
.objects \
.filter(schema=schema) \
.order_by(order_str)[start:end]
# __import__('ipdb').set_trace()
responses = self._make_date_readable(responses)
serializer = SchemaResponseSerializer(responses, many=True)
serializer = self._make_user_readable(serializer)
        serializer = self._make_instruction_links_readable(serializer)
return_data = {
'draw': int(draw),
'recordsTotal': responses_count,
'recordsFiltered': responses_count,
'data': serializer.data,
}
# __import__('ipdb').set_trace()
return Response(return_data)
class ResponseColumns(APIView):
def get(self, request, pk, format=None, *args):
req = request.GET
schema = Schema.objects.get(pk=pk)
sr = SchemaResponse.objects.filter(schema=schema).first()
columns = getcolumns(sr).getvalue()
return Response(columns)
class SchemaView(LoginRequiredMixin, APIView):
"""
    Fetches the FIRST object from ResponseList. Makes it available
    as a template for the table in main.html.
Excludes schema.id, and the placeholder qa_set in the template.
"""
renderer_classes = [TemplateHTMLRenderer]
template_name = 'dynamic_schemas/table_dev.html'
def get_object(self, pk):
try:
schema = Schema.objects.get(pk=pk)
if SchemaColumn.objects.filter(schema=schema).count() != 0:
all_responses = SchemaResponse.objects.filter(schema=schema)
single_response = all_responses.first()
serializer = SchemaResponseSerializer(single_response)
return serializer.data
except single_response.DoesNotExist:
raise Http404
def get(self, request, pk):
schema = Schema.objects.get(pk=pk)
schema_help_urls = schema.help_field.all()
schema_obsolete = schema.obsolete.all()
schema_new = schema.new.all()
all_responses = SchemaResponse.objects.filter(schema=schema)
# self._make_date_readable(all_responses)
serializer = SchemaResponseSerializer(all_responses, many=True)
data = {'single_response': self.get_object(pk),
'all_responses': serializer.data,
'pk': pk,
'schema': schema,
'help_urls': schema_help_urls,
'schema_obsolete': schema_obsolete,
'schema_new': schema_new,
}
# __import__('ipdb').set_trace()
return Response(data)
| 1.976563 | 2 |
my_classes/.history/ModulesPackages_PackageNamespaces/example3a/main_20210725220637.py | minefarmer/deep-Dive-1 | 0 | 6780 |
import os.path
import types
import sys
| 1.023438 | 1 |
api/views.py | conscience99/lyriko | 0 | 6781 | from django.shortcuts import render
from rest_framework import response
from rest_framework.serializers import Serializer
from . import serializers
from rest_framework.response import Response
from rest_framework.views import APIView
from django.views import View
from rest_framework import status
from . models import SaveList, User, Lyrics, SearchHistory, VerificationCode, SubmitLyrics
from rest_framework.permissions import BasePermission, IsAuthenticated, SAFE_METHODS, IsAdminUser
from rest_framework.authtoken.models import Token
from django.contrib.auth.hashers import make_password, check_password
from django.contrib.auth import login, authenticate
import requests
from django.db.models import Q
from bs4 import BeautifulSoup
import json
from datetime import datetime
import random
from django.core.mail import EmailMessage, EmailMultiAlternatives
from django.conf import settings
from django.template.loader import get_template
from django.urls import reverse
import jwt
from django.utils.encoding import force_bytes, force_text, DjangoUnicodeDecodeError
from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode
from django.contrib.sites.shortcuts import get_current_site
from .utils import Util
from rest_framework_simplejwt.tokens import RefreshToken
from django.template import Context
from django.http import HttpResponse, HttpResponseNotFound
import os
import re
import urllib
from datetime import datetime
import random
import time
now = datetime.now()
import json
class SignupView(APIView):
now = datetime.now()
def post(self, request, *args,**kwargs):
user=User()
try:
User.objects.get(email=request.data['email'])
return Response({"email":"already taken"})
except:
serializer=serializers.UserSerializer(data=request.data)
if serializer.is_valid():
password=make_password(request.data['password'])
username=request.data['username']
user.username=username
user.first_name=request.data['first_name']
user.last_name=request.data['last_name']
user.email=request.data['email']
user.email_username=request.data['email']
user.password=password
user.is_verified = False
user.save()
new_user=User.objects.get(id=user.id)
token=Token.objects.create(user=new_user)
verification = VerificationCode()
code = random.randint(199999,999999)
verification.code=code
verification.user_id=new_user.id
verification._year = now.year
verification._month = now.month
verification._day = now.day
verification._hour = now.hour
verification._minute = now.minute
verification.save()
from_e = settings.EMAIL_HOST_USER
to=request.data['email']
html = get_template('api/code.html')
html_content = html.render({'username':new_user.username, 'code':code})
                text = f'Hi {username}, \nPlease use {code} to continue with Lyriko.'
subject = 'Confirm your email'
email = EmailMultiAlternatives(
subject,
text,
from_e,
[to]
)
email.attach_alternative(html_content, 'text/html')
try:
email.send()
except:
pass
token=Token.objects.get(user=user)
response={'token':token.key, 'user':serializer.data}
return Response(response)
else:
return Response(serializer.errors)
class SendCode(APIView):
def post(self, request, *args, **kwargs):
try:
user = User.objects.get(email=request.data['email'])
except:
return Response({"error":"User not found."})
try:
v = VerificationCode.objects.get(user_id=user.id)
v.delete()
except:
pass
verification = VerificationCode()
code = random.randint(199999,999999)
verification.code=code
verification.user_id=user.id
verification._year = now.year
verification._month = now.month
verification._day = now.day
verification._hour = now.hour
verification._minute = now.minute
verification.save()
from_e = settings.EMAIL_HOST_USER
to=request.data['email']
html = get_template('api/code.html')
html_content = html.render({'username':user.username, 'code':code})
        text = f'Hi {user.username}, \nPlease use {code} to continue with Lyriko.'
subject = 'Action Required'
email = EmailMultiAlternatives(
subject,
text,
from_e,
[to]
)
email.attach_alternative(html_content, 'text/html')
try:
email.send()
except:
return Response({"error":"Error occured"})
return Response({"success":"Success"})
class AccountActivation(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
user=User.objects.get(username=request.user.username)
code=request.data['code']
try:
verification = VerificationCode.objects.get(user_id=user.id, code=int(code))
user.is_verified=True
user.save()
verification.delete()
return Response({'msg':'success'})
except:
return Response({'error':'Invalid code.'})
class VerifyUser(APIView):
def post(self, request, *args, **kwargs):
user = User.objects.get(email=request.data['email'])
code = request.data['code']
try:
_code = VerificationCode.objects.get(code=int(code), user_id=user.id)
_code.delete()
return Response({"msg":"success"})
except:
return Response({"error":"invalid code"})
class CheckSaveList(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
try:
if SaveList.objects.get(owner_username=request.user.username, lyrics_id=request.data['lyrics_id']):
return Response({"watchlisted":'true'})
except:
return Response({"watchlisted":'false'})
class LyricsView(APIView):
def get(self, request, *args, **kwargs):
if request.method=='GET':
lyrics_items=Lyrics.objects.all()
serializer = serializers.LyricsSerializer(lyrics_items,many=True)
response={'lyrics':serializer.data}
return Response(response, status=status.HTTP_200_OK)
else:
response={'error':'Forbidden'}
return Response(response, status=status.HTTP_400_BAD_REQUEST)
class AddLyricsView(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
if request.method=='POST':
data=request.data
lyrics=Lyrics()
serializer=serializers.LyricsSerializer(data=data)
if serializer.is_valid():
lyrics.title=request.POST['title']
lyrics.artist=request.POST['artist']
lyrics.body=request.POST['body']
lyrics.title_slug=request.POST['title'].replace(' ', '-').lower()
lyrics.artist_slug=request.POST['artist'].replace(' ', '-').lower()
response={'lyrics':serializer.data}
return Response(response,status=status.HTTP_200_OK )
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class SingleLyricsView(APIView):
def post(self, request, *args, **kwargs ):
artist = request.data['artist'].strip().replace("-"," ").title()
title=request.data['title'].strip().replace("-"," ").title()
search_history=SearchHistory()
title_clean1=request.data['title'].strip().replace("ain-t", "aint")
title_clean2=title_clean1.replace('you-re', 'youre')
title_cleean3 = title_clean2.replace('isn-t', 'isnt')
title_clean4 =title_cleean3.replace('aren-t', 'arent')
title_clean_5= title_clean4.replace("weren-t","werent")
title_clean6 = title_clean_5.replace("can-t", "cant")
title_clean7 = title_clean6.replace('don-t', 'dont')
title_clean8 = title_clean7.replace('i-d', 'id').replace('i-ve', 'ive').replace('we-ve','weve',).replace('you-ve', 'youve').replace('he-s', 'hes').replace('she-s', 'shes').replace('it-s', 'its',).replace('you-d', 'youd').replace('i-ll', 'ill').replace("you-ll", "youll").replace('let-s', "lets").replace("amn't", "amnt").replace("haven-t","havent")
try:
lyrics_item=Lyrics.objects.get(artist_slug=request.data['artist'], title_slug__icontains=title_clean8)
views = lyrics_item.views
updt_views=views+1
lyrics_item.views = updt_views
lyrics_item.save()
serializer=serializers.LyricsSerializer(lyrics_item, many=False)
response={'lyrics':serializer.data}
### Record activities ###
search_history.searcher_username = request.data['username']
search_history.artist=artist.replace('-',' ')
search_history.title=title.replace('-',' ')
search_history.save()
return Response(response,status=status.HTTP_200_OK)
except Lyrics.DoesNotExist:
return Response({"error":"Not Found"})
class SearchHistoryView(APIView):
permission_classes=[IsAuthenticated]
def get(self, request, *args, **kwargs ):
search_history_items=SearchHistory.objects.filter(searcher_username=request.user.username).order_by('-moment').all()
serializer=serializers.SearchHistorySerializer(search_history_items, many=True)
response={"search_history":serializer.data}
return Response(response,status=status.HTTP_200_OK)
class DeleteHistory(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
searcher_username = request.user.username
history_item_id = request.data['id']
try:
SearchHistory.objects.get(searcher_username=searcher_username, id=history_item_id).delete()
return Response({"msg":"OK"})
except:
return Response({"msg":"Something went wrong"})
class TrendingView(APIView):
def get(self, request, *args, **kwargs):
lyrics=Lyrics.objects.order_by('-views')[0:35]
serializer=serializers.LyricsSerializer(lyrics, many=True)
response={"top":serializer.data}
return Response(response)
class RandomView(APIView):
def get(self, request,*args, **kwargs, ):
lyrics=Lyrics.objects.all()
lyrics_items=[]
for lyric in lyrics:
lyrics_items.append(lyric)
random_lyrics=random.choice(lyrics_items)
serializer=serializers.LyricsSerializer(random_lyrics)
resp={"lyrics":serializer.data}
return Response(resp)
class RecentView(APIView):
def get(self, request, *args, **kwargs):
recent_items=SearchHistory.objects.order_by('-moment').all()[:20]
recent = []
for i in recent_items:
recent.append(i)
serializer=serializers.SearchHistorySerializer(recent, many=True)
resp={"recent":serializer.data}
return Response(resp)
class SuggestionView(APIView):
def post(self, request, *args, **kwargs):
_type=request.data['type']
if _type=="title":
lyrics=Lyrics.objects.filter(title__contains=request.data['title'])
serializer=serializers.LyricsSerializer(lyrics, many=True)
resp={'suggestions':serializer.data}
return Response(resp)
else:
lyrics=Lyrics.objects.filter(artist__contains=request.data['artist'])
serializer=serializers.LyricsSerializer(lyrics, many=True)
resp={'suggestions':serializer.data}
return Response(resp)
class ChangePassword(APIView):
def post(self, request, *args, **kwargs):
if request.data['access'] == "code":
try:
user = User.objects.get(email=request.data['email'])
except:
pass
            user.password = make_password(request.data['new_password'])
user.save()
return Response({"msg":"success"})
else:
user = User.objects.get(username=request.user.username)
current_password = request.data['current_password']
if check_password(current_password, user.password):
                user.password = make_password(request.data['new_password'])
user.save()
return Response({"success":"Password changed"})
else:
return Response({"error":"Incorrect password"})
class modifyUser(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
user = User.objects.get(pk=request.user.id)
new_email = request.data['email']
old_email = user.email
if new_email != old_email:
user.is_verified = False
user.username = request.data['username']
user.email = new_email
user.first_name = request.data['first_name']
user.last_name = request.data['last_name']
user.save()
n_user = User.objects.get(id=request.user.id)
serializer=serializers.UserSerializer(user, many=False)
response={'user':serializer.data}
return Response(response)
''' class EditLyricsView(APIView):
def post(self, request, pk, *args, **kwargs ):
data=request.data
lyrics=Lyrics.objects.get(pk=pk)
lyrics.title=request.POST['title']
lyrics.artist=request.POST['artist']
lyrics.body=request.POST['body']
Lyrics.objects.get(pk=pk)
lyrics.save()
lyrics_item=Lyrics.objects.get(pk=pk)
serializer=serializers.LyricsSerializer(lyrics_item,many=False)
response={'lyrics':serializer.data}
return Response(response,status=status.HTTP_200_OK ) '''
class SaveListView(APIView):
permission_classes=[IsAuthenticated]
def get(self, request, *args, **kwargs):
save_list_items=SaveList.objects.filter(owner_username=request.user.username)
save_list=[]
for i in save_list_items:
lyrics = Lyrics.objects.get(pk=i.lyrics_id)
save_list.append(lyrics)
serializer = serializers.LyricsSerializer(save_list, many=True)
return Response({'lyrics':serializer.data}, status=status.HTTP_200_OK)
class AddSaveListView(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
items=SaveList.objects.filter(owner_username=request.user.username)
data=request.data
username=request.user.username
savelist=SaveList()
try:
if SaveList.objects.get(owner_username=request.user.username, lyrics_id=request.data['lyrics_id']):
return Response({"Error":"Cannot add lyrics to Save List twice or more."})
except:
savelist.lyrics_id=request.data['lyrics_id']
savelist.owner_username=username
savelist.save()
save_list_items=SaveList.objects.filter(owner_username=request.user.username)
save_list = []
for save_list_item in save_list_items:
sl = Lyrics.objects.get(pk=save_list_item.lyrics_id)
save_list.append(sl)
serializer = serializers.LyricsSerializer(save_list, many=True)
response={'save_list':serializer.data}
return Response(response, status=status.HTTP_200_OK)
class RemoveSaveListView(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
owner_username=request.user.username
lyrics_id=request.data['lyrics_id']
save_list_item=SaveList.objects.get(owner_username=owner_username, lyrics_id=lyrics_id)
save_list_item.delete()
save_list_items=SaveList.objects.filter(owner_username=request.user.username)
save_list = []
for save_list_item in save_list_items:
sl = Lyrics.objects.get(pk=save_list_item.lyrics_id)
save_list.append(sl)
serializer = serializers.LyricsSerializer(save_list, many=True)
response={'save_list':serializer.data}
return Response(response, status=status.HTTP_200_OK)
class CheckUserView(APIView):
def post(self, request, *args, **kwargs):
try:
User.objects.get(username=request.data['username'])
return Response({'true'}, status=status.HTTP_200_OK)
except User.DoesNotExist:
return Response({'false'})
""" class SignupView(APIView):
def post(self, request, *args, **kwargs):
user=User()
serializer=serializers.UserSerializer(data=request.data)
print(request.data)
if serializer.is_valid():
            password=make_password(request.data['password'])
username=request.data['username']
user.username=username
user.first_name=request.data['first_name']
user.last_name=request.data['last_name']
user.email=request.data['email']
user.email_username=request.data['email']
user.password=password
user.save()
new_user=User.objects.get(username=username)
print(new_user)
token=Token.objects.create(user=new_user)
response={'token':token.key, 'user':serializer.data}
return Response(response, status=status.HTTP_200_OK)
else:
return Response(serializer.errors) """
class UserDataView(APIView):
permission_classes=[IsAuthenticated]
def get(self, request, *args, **kwargs):
user=User.objects.get(username=request.user.username)
serializer=serializers.UserSerializer(user, many=False)
response={'user':serializer.data}
return Response(response, status=status.HTTP_200_OK)
class SigninView(APIView):
def post(self, request, *args, **kwargs):
password=request.data['password']
username=request.data['username']
try:
if '@' not in username:
user=User.objects.get(username=username)
elif '@' in username:
user=User.objects.get(email_username=username)
except:
return Response({'error':'User not found.'})
if check_password(password, user.password):
login(self.request, user)
token=Token.objects.get(user=user)
serializer=serializers.UserSerializer(user, many=False)
response={'user':serializer.data, 'token':token.key}
return Response(response, status=status.HTTP_200_OK)
else:
return Response({'error':'Incorrect password'})
class SubmitLyricsv(APIView):
def post(self, request, *args, **kwargs):
serializer = serializers.SubmitLyricsSerializer(data=request.data)
if serializer.is_valid:
sl=SubmitLyrics()
sl.title=request.data['title']
sl.artist=request.data['artist']
sl.body=request.data['body']
sl.save()
response = {"msg":"OK"}
return Response(response)
else:
return Response({serializers.errors})
class ApproveSubmitLyrics(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
user = request.user
if user.is_lyrics_admin != True:
return Response({"Error":"Forbidden"})
else:
lyrics = Lyrics()
lyrics.artist = request.data['artist']
lyrics.artist_slug = request.data['artist'].strip().replace(" ","-").lower()
lyrics.title = request.data['title']
lyrics.title_slug=request.data['title'].strip().replace(" ","-").lower()
lyrics.body = request.data['body']
lyrics.save()
sl = SubmitLyrics.objects.get(id=request.data['id']).delete()
return Response({"msg":"OK"})
class SubmitLyricsListView(APIView):
permission_classes=[IsAuthenticated]
def get(self, request, *args, **kwargs):
user=request.user
if user.is_lyrics_admin != True:
return Response({"Error":"Forbidden"})
else:
sub = SubmitLyrics.objects.all()
serializer = serializers.SubmitLyricsSerializer(sub, many=True)
res = {"submit_lyrics_view":serializer.data}
return Response(res)
class SubmitLyricsView(APIView):
permission_classes=[IsAuthenticated]
def post(self, request, *args, **kwargs):
user = request.user
if user.is_lyrics_admin != True:
return Response({"Error":"Forbidden"})
else:
item = SubmitLyrics.objects.get(id=request.data['id'])
serializer = serializers.SubmitLyricsSerializer(item, many=False)
res = {"submit_lyrics_item":serializer.data}
return Response(res)
class DeclineSubmitLyrics(APIView):
def post(self, request, *args, **kwargs):
user = request.user
if user.is_lyrics_admin != True:
return Response({"Error":"Forbidden"})
else:
item = SubmitLyrics.objects.get(id=request.data['id'])
item.delete()
return Response({"msg":"OK"})
class RelatedView(APIView):
def post(self, request, *args, **kwargs):
lyrics = Lyrics.objects.filter(artist_slug=request.data['artist'])[0:10]
serializer=serializers.LyricsSerializer(lyrics, many=True)
response={"top":serializer.data}
return Response(response)
class SearchViewv(APIView):
def post(self, request, *args, **kwargs):
if request.data['term']:
term=request.data['term']
terms = term.split()
results =[]
for i in terms:
if i!="by":
for j in Lyrics.objects.filter(title__icontains=i):
results.append(j)
for k in Lyrics.objects.filter(artist__icontains=i):
results.append(k)
search_res = [i for j, i in enumerate(results) if i not in results[:j]]
serializer=serializers.LyricsSerializer(search_res, many=True)
response={"result":serializer.data}
return Response(response)
else:
return Response({"error":"Unavailable"})
""" data = requests.get(f"https://api.lyrics.ovh/v1/{artistSlug}/{titleSlug}/")
lyric = data.json()
if data.status_code == 200:
lyrics.title=title
lyrics.artist=artist
lyrics.title_slug=titleSlug
lyrics.artist_slug=artistSlug
lyrics.body=lyric['lyrics']
lyrics.save()
lyrics_item=Lyrics.objects.get(title_slug=title_slug, artist_slug=artist_slug)
searchHistory.lyrics_id = lyrics_item.id
searchHistory.searcher_username = request.user.username
searchHistory.moment=now.strftime('%Y-%m-%d %H:%M:%S')
searchHistory.save()
serializer=serializers.LyricsSerializer(lyrics_item, many=False)
response={'lyrics':serializer.data}
return Response(response,status=status.HTTP_200_OK ) """
| 2 | 2 |
__dm__.py | AbhilashDatta/InstagramBot | 12 | 6782 | from selenium import webdriver
from time import sleep
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
def Dm(driver,user,message):
''' This function is used to direct message a single user/group '''
driver.get('https://www.instagram.com/direct/inbox/')
send_message_button = WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.XPATH, '//*[@id="react-root"]/section/div/div[2]/div/div/div[2]/div/div[3]/div/button'))).click()
search_user = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '/html/body/div[5]/div/div/div[2]/div[1]/div/div[2]/input')))
search_user.send_keys(user)
selector = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '/html/body/div[5]/div/div/div[2]/div[2]/div/div/div[3]/button/span'))).click()
next_button = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '/html/body/div[5]/div/div/div[1]/div/div[2]/div/button/div'))).click()
try:
text = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '//*[@id="react-root"]/section/div/div[2]/div/div/div[2]/div[2]/div/div[2]/div/div/div[2]/textarea')))
text.send_keys(message)
send = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '//*[@id="react-root"]/section/div/div[2]/div/div/div[2]/div[2]/div/div[2]/div/div/div[3]/button'))).click()
driver.get('https://www.instagram.com/direct/inbox/')
except:
print('No message sent to '+user)
driver.get('https://www.instagram.com/direct/inbox/') | 2.8125 | 3 |
mashov.py | Yotamefr/BeitBiram | 1 | 6783 | import requests
from datetime import datetime
import json
from extras import Day, Lesson
class PasswordError(Exception):
pass
class LoginFailed(Exception):
pass
class MashovAPI:
"""
MashovAPI
Originally made by Xiddoc. Project can be found here: https://github.com/Xiddoc/MashovAPI
Modifications were made by me, Yotamefr.
"""
def __init__(self, username, **kwargs):
"""
Parameters
------------
username -> Represents the username
------------
        There is some weird stuff here. I might clean it up in a while.
Again, this code wasn't made by me, just modified by me
"""
self.url = "https://web.mashov.info/api/{}/"
self.session = requests.Session()
self.session.headers.update({'Accept': 'application/json, text/plain, */*',
'Referer': 'https://web.mashov.info/students/login',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36',
'Content-Type': 'application/json'})
self.username = username
self.auth_ID = 0
self.user_ID = self.auth_ID
self.uid = self.auth_ID
self.uID = self.auth_ID
self.guid = self.auth_ID
self.guID = self.auth_ID
self.school_site = ""
self.moodle_site = ""
self.school_name = ""
self.last_name = ""
self.first_name = ""
self.class_name = ""
self.last_pass = ""
self.last_login = ""
self.school_years = []
self.csrf_token = ""
self.user_children = {}
# Kwargs password
if "password" in kwargs:
self.password = kwargs["password"]
else:
self.password = False
# Kwargs schoolData
if "schoolData" in kwargs:
self.school_data = kwargs["schoolData"]
else:
self.school_data = False
# Kwargs schoolID
if "schoolID" in kwargs:
self.school_ID = kwargs["schoolID"]
elif not self.school_data:
self.school_data = self.get_schools()
self.school_ID = self.get_school_ID_by_name(kwargs["schoolName"])
self.current_year = datetime.now().year + 1
def login(self):
"""
Parameters
------------
------------
"""
if not self.password:
raise PasswordError("No password entered.")
self.login_data = {'semel': self.school_ID,
'username': self.username,
                           'password': self.password,
'year': self.current_year}
self.ret_data = self.send("login", "post", self.login_data)
self.ret_text = json.loads(self.ret_data.text)
if not self.ret_data.status_code == 200:
self.is_logged_in = False
raise LoginFailed()
self.is_logged_in = True
self.auth_ID = self.ret_text["credential"]["userId"]
self.user_ID = self.auth_ID
self.uid = self.auth_ID
self.uID = self.auth_ID
self.guid = self.auth_ID
self.guID = self.auth_ID
self.school_site = self.ret_text["accessToken"]["schoolOptions"]["schoolSite"]
self.moodle_site = self.ret_text["accessToken"]["schoolOptions"]["moodleSite"]
self.school_name = self.ret_text["accessToken"]["schoolOptions"]["schoolName"]
self.last_name = self.ret_text["accessToken"]["children"][0]["familyName"]
self.first_name = self.ret_text["accessToken"]["children"][0]["privateName"]
self.class_name = f'{self.ret_text["accessToken"]["children"][0]["classNum"]}{self.ret_text["accessToken"]["children"][0]["classCode"]}'
self.last_pass = self.ret_text["accessToken"]["lastPassSet"]
self.last_login = self.ret_text["accessToken"]["lastLogin"]
self.school_years = self.ret_text["accessToken"]["userSchoolYears"]
self.csrf_token = self.ret_data.cookies["Csrf-Token"]
self.session.headers.update({"x-csrf-token": self.csrf_token})
self.user_children = self.ret_text["accessToken"]["children"]
del self.username
del self.password
@property
def timetable(self):
return self.form_return(self.send(f"students/{self.user_ID}/timetable", "get"))
def update_school_data(self):
"""
Parameters
------------
------------
"""
self.school_data = self.form_return(self.send("schools", "get"))
def get_schools(self):
"""
Parameters
------------
------------
"""
self.update_school_data()
        return self.school_data
def get_school_ID_by_name(self, school):
"""
Parameters
------------
school -> Represents the school name
------------
"""
if self.school_data:
schoolData = self.school_data
else:
            self.update_school_data()
            schoolData = self.school_data
for schools in schoolData:
if schools["name"].find(school) == 0:
return schools["semel"]
def clear_session(self):
"""
Parameters
------------
------------
"""
return self.form_return(self.send("clearSession", "get"))
def get_special_lessons(self):
"""
Parameters
------------
------------
"""
return self.get_private_lessons()
def get_private_lessons(self):
"""
Parameters
------------
------------
"""
return self.form_return(self.send("students/{}/specialHoursLessons".format(self.auth_ID), "get"))
def get_private_lesson_types(self):
"""
Parameters
------------
------------
"""
return self.form_return(self.send("lessonsTypes", "get"))
@property
def classes(self):
return self.groups
@property
def groups(self):
return self.form_return(self.send("students/{}/groups".format(self.auth_ID), "get"))
@property
def teachers(self):
recipents = self.recipents
teachers = []
for i in recipents:
if "הורים/" not in i["displayName"]:
teachers.append(i)
return teachers
@property
def recipents(self):
return self.form_return(self.send("mail/recipients", "get"))
def form_return(self, response):
"""
Parameters
------------
response -> Represents the response from the website
------------
"""
if response.status_code != 200:
return False
else:
try:
return json.loads(response.text)
except:
return response.text
def send(self, url, method="get", params={}, files={}):
"""
Parameters
------------
url -> Represents the url to go to
method -> Represents the method to use. Can be either `get` or `post`
params -> Represents the parameters to send to the website. Only use it on `post`
files -> Pretty much the same as for the params
------------
"""
return getattr(self.session, str(method).strip().lower())(self.url.format(url), data=json.dumps(params),
files=files)
def __str__(self):
return json.dumps({
"MashovAPI": {
"url": self.url,
"sessionH": dict(self.session.headers),
"sessionC": self.session.cookies.get_dict(),
"username": self.username,
"password": self.password,
"schoolData": self.school_data,
"schoolID": self.school_ID,
"currentYear": self.current_year,
"loginData": self.login_data,
"isLoggedIn": self.is_logged_in,
"authID": self.auth_ID,
"userID": self.user_ID,
"uid": self.uid,
"uID": self.uID,
"guid": self.guid,
"guID": self.guID,
"schoolSite": self.school_site,
"moodleSite": self.moodle_site,
"schoolName": self.school_name,
"lastName": self.last_name,
"firstName": self.first_name,
"className": self.class_name,
"lastPass": self.last_pass,
"lastLogin": self.last_login,
"schoolYears": self.school_years,
"csrfToken": self.csrf_token,
"userChildren": self.user_children
}})
def get_day(self, day_num: int):
"""
Parameters
------------
        day_num -> Represents the day number
------------
"""
day = []
timetable = []
for i in self.timetable:
if i["timeTable"]["day"] == day_num:
timetable.append(i)
        # Sort the lessons chronologically by lesson number.
        timetable.sort(key=lambda entry: entry["timeTable"]["lesson"])
for i in timetable:
if not "קפ'" in i["groupDetails"]["subjectName"]: # We don't need that. It's useless.
if len(day) > 0:
while i["timeTable"]["lesson"] > day[-1].number + 1:
day.append(Lesson(
lesson="",
lesson_number=day[-1].number + 1,
lesson_time="",
classroom="",
teacher="",
)
)
i["groupDetails"]["groupTeachers"][0]["teacherName"] = i["groupDetails"]["groupTeachers"][0]["teacherName"].replace("-", " ")
day.append(Lesson(
lesson=i["groupDetails"]["subjectName"],
lesson_number=i["timeTable"]["lesson"],
lesson_time="",
classroom=i["timeTable"]["roomNum"],
teacher=i["groupDetails"]["groupTeachers"][0]["teacherName"]
)
)
return Day(day_num, day)
def get_today(self):
"""
Parameters
------------
------------
"""
today = datetime.now().weekday()
today += 2
if today > 7:
today -= 7
return self.get_day(today)
| 2.703125 | 3 |
lab6.py | jschmidtnj/CS115 | 0 | 6784 | '''
Created on 10/11/2017
@author: <EMAIL>
Pledge: I pledge my honor that I have abided by the Stevens Honor System -<NAME>
CS115 - Lab 6
'''
def isOdd(n):
'''Returns whether or not the integer argument is odd.'''
#question 1: base_2 of 42: 101010
if n == 0:
return False
if n % 2 != 0:
return True
return False
#question 2: if given an odd base-10 number, the least-significant bit of its base-2 representation will be a 1.
#question 3: if given an even base-10 number, the least-significant bit of its base-2 representation will be a 0.
#This is because 2^0 = 1, and that is the only way to make an odd number, by having a 1 in the least significant bit.
#question 4: By eliminating the least significant bit, the original number decreases by a factor of 2, if the bit is a 0.
#if the least significant bit is a 1, the original number is decreased by a factor of 2, - 1.
#question 5: If N is odd, the base-2 of N is Y + "1". If N is even, the base-2 of N is Y + "0".
#This is because to get from N base-10 to N base-2 you do successive division by 2, keeping the remainder, so given
#the base-2 of all of the division except for the first, one must put that remainder in front, hence the answer given.
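#worked example (questions 2-5): successive division of 13 by 2 gives remainders
#1, 0, 1, 1; reading them last-to-first yields "1101", the base-2 form of 13.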
def numToBinary(n):
'''Precondition: integer argument is non-negative.
Returns the string with the binary representation of non-negative integer n.
If n is 0, the empty string is returned.'''
if n == 0:
return ""
elif isOdd(n):
return numToBinary(n // 2) + "1"
else: return numToBinary(n // 2) + "0"
#print(numToBinary(15))
def binaryToNum(s):
'''Precondition: s is a string of 0s and 1s.
Returns the integer corresponding to the binary representation in s.
Note: the empty string represents 0.'''
if s == "":
return 0
return int(s[0])*(2**(len(s)-1)) + binaryToNum(s[1:])
#print(binaryToNum("1111"))
def addBin(s, numAdd, carry = 0):
"""adds 2 binary numbers"""
if s == "" or numAdd == "":
if carry == 0:
return s + numAdd
place = carry
carry = 0
if s != "" and s[-1] == "1":
carry = place
place = 1 - place
if numAdd != "" and numAdd[-1] == "1":
carry += place
place = 1 - place
return addBin(s[:-1], numAdd[:-1], carry) + str(place)
#print(addBin("100", "001", 0))
def makeEightBit(a):
    """makes a binary number 8 bit"""
    if len(a) == 8:
        return str(a)
    elif len(a) > 8:
        # keep only the eight least-significant bits
        return makeEightBit(a[(len(a)-8):])
    # pad with leading zeros until the string is eight bits long
    return makeEightBit("0" + a)
def increment(s):
'''Precondition: s is a string of 8 bits.
Returns the binary representation of binaryToNum(s) + 1.'''
#numAdd = "00000001"
dec = binaryToNum(s)
dec += 1
answer = numToBinary(dec)
#print(answer)
if len(answer) > 8:
return answer[(len(answer)-8):]
answer = (8-len(answer))*"0" + answer
return answer
#print(increment("1110100000"))
def count(s, n):
'''Precondition: s is an 8-bit string and n >= 0.
Prints s and its n successors.'''
if n == 0:
print(s)
return ""
print(s)
return count(increment(s), n-1)
#print(count("11111110", 5))
#print("a")
def numToTernary(n):
'''Precondition: integer argument is non-negative.
Returns the string with the ternary representation of non-negative integer
n. If n is 0, the empty string is returned.'''
if n == 0:
return ""
return numToTernary(n // 3) + str(n % 3)
#print(numToTernary(42))
def ternaryToNum(s):
'''Precondition: s is a string of 0s, 1s, and 2s.
Returns the integer corresponding to the ternary representation in s.
Note: the empty string represents 0.'''
if s == "":
return 0
return int(s[0])*(3**(len(s)-1)) + ternaryToNum(s[1:])
#print(ternaryToNum('12211010'))
| 3.734375 | 4 |
clover.py | imyz/25000 | 8 | 6785 | #!/usr/bin/env python
from math import *
import sys
def rotate(x, y, degrees):
c = cos(pi * degrees / 180.0)
s = sin(pi * degrees / 180.0)
return x * c + y * s, y * c - x * s
def move(verb, **kwargs):
keys = kwargs.keys()
keys.sort()
words = [verb.upper()]
for key in keys:
words.append('%s%g' % (key.upper(), kwargs[key]))
print ' '.join(words)
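# e.g. move('G1', x=10, z=-2) prints "G1 X10 Z-2" (words sorted by axis letter)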
def travel(**kwargs): move('G0', **kwargs)
def linear(**kwargs): move('G1', **kwargs)
def clockwise(**kwargs): move('G2', **kwargs)
def up(): travel(z=8)
def down(): linear(z=-2)
def jump(**kwargs):
up()
travel(**kwargs)
down()
frame_width = 200
frame_height = 75
drill = 1.6 # 1/16 inch radius.
extrusion = 15
motor_screw_grid = 31
motor_cutout_diameter = 22
motor_width = 42.2
motor_offset = 35 # Motor face to extrusion.
motor_side, motor_bend = rotate(0, motor_offset + extrusion, 30)
motor_side += extrusion/2
motor_side += extrusion/cos(pi/6)
mc = motor_cutout_diameter/2 + drill
#nema23 = 47.14 # Mounting screws center-to-center
clover = 6
thickness = 0.0478 * 25.4 # 18 gauge steel.
enable_perimeter = False
print >> sys.stderr, 'thickness', thickness
print >> sys.stderr, 'motor_bend', motor_bend
print >> sys.stderr, 'motor_side', motor_side
print >> sys.stderr, 'mc', mc
print >> sys.stderr, 'extrusion-to-extrusion', frame_width
print >> sys.stderr, 'edge-to-edge', frame_width + 2*extrusion
xa = motor_side - drill # Outside wings start
xb = motor_side + motor_bend + drill
xs1 = xa + extrusion/2 # Extrusion screws
xs2 = xb - extrusion/2
# xe = frame_width/2 # Extrusion corner
xt = motor_width/2
xms = motor_screw_grid/sqrt(2)
xgs = 19
ya = frame_height/2 + drill # Top without flange
yb = frame_height/2 + drill - extrusion
ys = frame_height/2 - extrusion/2 # Extrusion screws
yt = motor_width/2
yt2 = yt + 4
yms = xms
ygs = xgs
s2 = sqrt(2)
print 'G17 ; Select XY plane for arcs'
print 'G90 ; Absolute coordinates'
move('G92', x=0, y=0, z=0)
linear(x=0, y=0, z=0)
print '; Gasket screw holes'
for x in (-xgs, xgs):
for y in (-x, x):
jump(x=x, y=y)
# clockwise(i=1)
if enable_perimeter:
print '; Horizontal extrusion screw holes'
for x in (xs1, xs2):
jump(x=x, y=ys)
for x in (xs2, xs1, -xs1, -xs2):
jump(x=x, y=-ys)
for x in (-xs2, -xs1):
jump(x=x, y=ys)
#print '; 22mm dia cutout for reference'
#jump(x=0, y=11)
#clockwise(j=-11)
#print '; NEMA17 square for reference'
#jump(x=0, y=yt*s2)
#linear(x=xt*s2, y=0)
#linear(x=0, y=-yt*s2)
#linear(x=-xt*s2, y=0)
#linear(x=0, y=yt*s2)
def clovercut(z):
up()
travel(x=-clover+1, y=yms-clover-1)
linear(z=z)
print '; Motor cutout clover leaf'
linear(x=-clover, y=yms-clover)
clockwise(x=clover, i=clover, j=clover)
#clockwise(x=xms-clover, y=clover, r=mc)
linear(x=xms-clover, y=clover, r=mc)
clockwise(y=-clover, i=clover, j=-clover)
#clockwise(x=clover, y=-yms+clover, r=mc)
linear(x=clover, y=-yms+clover, r=mc)
clockwise(x=-clover, i=-clover, j=-clover)
#clockwise(x=-xms+clover, y=-clover, r=mc)
linear(x=-xms+clover, y=-clover, r=mc)
clockwise(y=clover, i=-clover, j=clover)
#clockwise(x=-clover, y=yms-clover, r=mc)
linear(x=-clover, y=yms-clover, r=mc)
linear(x=-clover+1, y=yms-clover+1)
for z in (-1, -2.5):
clovercut(z)
def perimeter(z):
up()
travel(x=xa, y=yb)
linear(z=z)
print '; Right wing (outside horizontal extrusions)'
clockwise(x=xa+extrusion, y=ya, i=extrusion)
linear(x=xb)
linear(y=-ya)
linear(x=xa+extrusion)
clockwise(x=xa, y=-yb, j=extrusion)
print '; Extrusion pass-through and motor mounting plate'
linear(x=xa-20)
clockwise(x=-xa+20, i=-xa+20, j=yb)
linear(x=-xa, y=-yb)
print '; Left wing (outside horizontal extrusions)'
clockwise(x=-xa-extrusion, y=-ya, i=-extrusion)
linear(x=-xb)
linear(y=ya)
linear(x=-xa-extrusion)
clockwise(x=-xa, y=yb, j=-extrusion)
print '; Extrusion pass-through and motor mounting plate'
linear(x=-xa+20)
clockwise(x=xa-20, i=xa-20, j=-yb)
linear(x=xa, y=yb)
if enable_perimeter:
for z in (-1, -2.5):
perimeter(z)
print '; All done'
up()
| 2.953125 | 3 |
scripts/mnist_inference.py | asiakaczmar/noise2self | 0 | 6786 | import torch
from torchvision.datasets import MNIST
from torchvision import transforms
from torch.utils.data import DataLoader
from scripts.utils import SyntheticNoiseDataset
from models.babyunet import BabyUnet
CHECKPOINTS_PATH = '../checkpoints/'
mnist_test = MNIST('../inferred_data/MNIST', download=True,
transform=transforms.Compose([
transforms.ToTensor(),
]), train=False)
noisy_mnist_test = SyntheticNoiseDataset(mnist_test, 'test')
data_loader = DataLoader(noisy_mnist_test, batch_size=256, shuffle=True)
for x in range(0, 200, 10):
    trained_model = BabyUnet()
    # load_state_dict expects a state dict, not a checkpoint path, so load it first
    trained_model.load_state_dict(torch.load(CHECKPOINTS_PATH + 'model' + str(x)))
    trained_model.eval()
    with torch.no_grad():
        for i, batch in enumerate(data_loader):
            denoised = trained_model(batch)
            break
    # np.save takes (file, array) and appends '.npy' to the filename on its own
    np.save('../inferred_data/model' + str(x), denoised.numpy())
| 2.46875 | 2 |
src/processing/augmentation.py | sdcubber/kaggle_carvana | 0 | 6787 | # Script for data augmentation functions
import numpy as np
import torch  # needed by image_to_tensor below
from collections import deque
from PIL import Image
import cv2
from data.config import *
def imread_cv2(image_path):
"""
Read image_path with cv2 format (H, W, C)
if image is '.gif' outputs is a numpy array of {0,1}
"""
image_format = image_path[-3:]
if image_format == 'jpg':
image = cv2.imread(image_path)
else:
image = np.array(Image.open(image_path))
return image
def resize_cv2(image, height=1280, width=1918):
    # cv2.resize takes (width, height); interpolation must be passed as a keyword,
    # otherwise the third positional argument is interpreted as `dst`.
    return cv2.resize(image, (width, height), interpolation=cv2.INTER_LINEAR)
def image_to_tensor(image, mean=0, std=1.):
"""Transform image (input is numpy array, read in by cv2) """
if len(image.shape) == 2:
image = image.reshape(image.shape[0], image.shape[1], 1)
image = image.astype(np.float32)
image = (image-mean)/std
image = image.transpose((2,0,1))
tensor = torch.from_numpy(image)
return tensor
# --- Data Augmentation functions --- #
# A lot of functions can be found here:
# https://github.com/fchollet/keras/blob/master/keras/preprocessing/image.py#L223
# transform image and label
def randomHorizontalFlip(image, mask, p=0.5):
"""Do a random horizontal flip with probability p"""
if np.random.random() < p:
image = np.fliplr(image)
mask = np.fliplr(mask)
return image, mask
def randomVerticalFlip(image, mask, p=0.5):
"""Do a random vertical flip with probability p"""
if np.random.random() < p:
image = np.flipud(image)
mask = np.flipud(mask)
return image, mask
def randomHorizontalShift(image, mask, max_shift=0.05, p=0.5):
"""Do random horizontal shift with max proportion shift and with probability p
Elements that roll beyond the last position are re-introduced at the first."""
max_shift_pixels = int(max_shift*image.shape[1])
shift = np.random.choice(np.arange(-max_shift_pixels, max_shift_pixels+1))
if np.random.random() < p:
image = np.roll(image, shift, axis=1)
mask = np.roll(mask, shift, axis=1)
return image, mask
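# e.g. np.roll([1, 2, 3, 4], 1) -> [4, 1, 2, 3]: pixels shifted past one edge
# re-enter at the opposite edge, as noted in the docstrings above.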
def randomVerticalShift(image, mask, max_shift=0.05, p=0.5):
"""Do random vertical shift with max proportion shift and probability p
Elements that roll beyond the last position are re-introduced at the first."""
max_shift_pixels = int(max_shift*image.shape[0])
shift = np.random.choice(np.arange(-max_shift_pixels, max_shift_pixels+1))
if np.random.random() < p:
image = np.roll(image, shift, axis=0)
mask = np.roll(mask, shift, axis=0)
return image, mask
def randomInvert(image, mask, p=0.5):
"""Randomly invert image with probability p"""
if np.random.random() < p:
image = 255 - image
mask = mask
return image, mask
def randomBrightness(image, mask, p=0.75):
"""With probability p, randomly increase or decrease brightness.
See https://stackoverflow.com/questions/37822375/python-opencv-increasing-image-brightness-without-overflowing-uint8-array"""
if np.random.random() < p:
max_value = np.percentile(255-image, q=25) # avoid burning out white cars, so take image-specific maximum
value = np.random.choice(np.arange(-max_value, max_value))
if value > 0:
image = np.where((255 - image) < value,255,image+value).astype(np.uint8)
else:
image = np.where(image < -value,0,image+value).astype(np.uint8)
return image, mask
def randomHue(image, mask, p=0.25, max_value=75):
"""With probability p, randomly increase or decrease hue.
See https://stackoverflow.com/questions/32609098/how-to-fast-change-image-brightness-with-python-opencv"""
if np.random.random() < p:
value = np.random.choice(np.arange(-max_value, max_value))
hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
hsv[:,:,0] = hsv[:,:,0] + value
hsv = np.clip(hsv, a_min=0, a_max=255).astype(np.uint8)
image = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
return image, mask
def GaussianBlur(image, mask, kernel=(1, 1), sigma=1, p=0.5):
    """With probability p, apply Gaussian blur to the image (mask is unchanged)"""
    if np.random.random() < p:
        image = cv2.GaussianBlur(image, kernel, sigma)
    return image, mask
def randomRotate(image, mask, max_angle, p=0.5):
    """Perform random rotation with max_angle and probability p"""
    if np.random.random() < p:
        angle = np.random.uniform(-max_angle, max_angle)
        M = cv2.getRotationMatrix2D((image.shape[1] / 2, image.shape[0] / 2), angle, 1.0)
        image = cv2.warpAffine(image, M, (image.shape[1], image.shape[0]))
        mask = cv2.warpAffine(mask, M, (mask.shape[1], mask.shape[0]))
    return (image, mask)
| 3.296875 | 3 |
substitute_finder/templatetags/substitute_finder_extra.py | tohugaby/pur_beurre_web | 1 | 6788 | <reponame>tohugaby/pur_beurre_web<gh_stars>1-10
"""
substitute_finder app custom templatetags module
"""
from django import template
register = template.Library()
@register.filter
def range_tag(value, min_value=0):
"""
    tag that returns a range
"""
if value:
return range(min_value, value)
return range(min_value)
| 2.171875 | 2 |
terrafirma/core/views/env.py | AlexandraAlter/django-terrafirma | 0 | 6789 | <filename>terrafirma/core/views/env.py
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse_lazy
from django import views
from django.views import generic as g_views
from django.views.generic import base as b_views, edit as e_views
from .. import forms, models
class NewEnvView(e_views.CreateView):
model = models.Environment
fields = ['name', 'abbrev']
success_url = reverse_lazy('home')
class EnvMixin(b_views.ContextMixin):
def setup(self, request, *args, **kwargs):
super().setup(request, *args, **kwargs)
self.env = get_object_or_404(models.Environment, abbrev=kwargs['env_abbrev'])
def url_vars(self):
return {'env_abbrev': self.env.abbrev}
def get_context_data(self, **kwargs):
return super().get_context_data(env=self.env, **kwargs)
class MaybeEnvMixin(b_views.ContextMixin):
    def setup(self, request, *args, **kwargs):
        super().setup(request, *args, **kwargs)
        # 'env' is optional here (hence "Maybe"); fall back to None instead of
        # raising when the parameter is absent or unknown.
        abbrev = request.GET.get('env')
        self.env = models.Environment.objects.filter(abbrev=abbrev).first() if abbrev else None
def url_vars(self):
return {'env_abbrev': self.env.abbrev if self.env else None}
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
if self.env:
context.update(env=self.env)
return context
class EnvView(EnvMixin, g_views.DetailView):
model = models.Environment
slug_field = 'abbrev'
slug_url_kwarg = 'env_abbrev'
class EditEnvView(EnvMixin, e_views.UpdateView):
model = models.Environment
fields = ['name', 'abbrev']
slug_field = 'abbrev'
slug_url_kwarg = 'env_abbrev'
def setup(self, request, *args, **kwargs):
super().setup(request, *args, **kwargs)
self.object = self.env
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.save()
return redirect('env', env_abbrev=self.env.abbrev)
| 2.234375 | 2 |
geoviz/__init__.py | JustinGOSSES/geoviz | 6 | 6790 | from load_las_data import LoadLasData
from altair_log_plot import AltAirLogPlot
from load_shapefile_data import LoadShpData
from alitair_well_location_map import WellLocationMap
| 1.078125 | 1 |
core/data/utils.py | ahmad-PH/auto_lcc | 2 | 6791 | <reponame>ahmad-PH/auto_lcc<filename>core/data/utils.py
import pickle
import pandas as pd
from typing import List, Tuple
def load_libofc_df(data_path):
def tuple_to_df(data: List[Tuple]) -> pd.DataFrame:
return pd.DataFrame(data, columns=["class", "title", "synopsis", "id"])
with open(data_path, 'rb') as f:
classes = pickle.load(f)
train = pickle.load(f)
test = pickle.load(f)
return classes, tuple_to_df(train), tuple_to_df(test)
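# Example usage (hypothetical path):
#   classes, train_df, test_df = load_libofc_df("data/libofc.pkl")
# Note: the three pickle.load calls must match the order the objects were dumped in.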
| 2.875 | 3 |
apps/users/adminx.py | hhdMrLion/mxshop-api | 0 | 6792 | <reponame>hhdMrLion/mxshop-api<filename>apps/users/adminx.py
import xadmin
from users.models import VerifyCode
from xadmin import views
class BaseSetting(object):
# 添加主题功能
enable_themes = True
    use_bootswatch = True
class GlobalSettings(object):
# 全局配置,后端管理标题和页脚
site_title = '天天生鲜后台管理'
site_footer = 'https://www.qnmlgb.top/'
# 菜单收缩
menu_style = 'accordion'
class VerifyCodeAdmin(object):
list_display = ['code', 'mobile', 'add_time']
xadmin.site.register(VerifyCode, VerifyCodeAdmin)
xadmin.site.register(views.BaseAdminView, BaseSetting)
xadmin.site.register(views.CommAdminView, GlobalSettings)
| 1.773438 | 2 |
Archive/train_cnn.py | Yeok-c/Urban-Sound-Classification | 0 | 6793 | ### Load necessary libraries ###
import numpy as np
from sklearn.model_selection import KFold
from sklearn.metrics import accuracy_score
import tensorflow as tf
from tensorflow import keras
from sklearn.metrics import ConfusionMatrixDisplay
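# NOTE: get_network() is assumed to be defined elsewhere in this project
# (it builds and compiles the Keras CNN); it is not defined in this script.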
model = get_network()
model.summary()
### Train and evaluate via 10-Folds cross-validation ###
accuracies = []
folds = np.array(['fold1','fold2','fold3','fold4',
'fold5','fold6','fold7','fold8',
'fold9','fold10'])
load_dir = "UrbanSounds8K/processed/"
kf = KFold(n_splits=10)
for train_index, test_index in kf.split(folds):
x_train, y_train = [], []
for ind in train_index:
# read features or segments of an audio file
train_data = np.load("{0}/{1}.npz".format(load_dir,folds[ind]),
allow_pickle=True)
# for training stack all the segments so that they are treated as an example/instance
features = np.concatenate(train_data["features"], axis=0)
labels = np.concatenate(train_data["labels"], axis=0)
x_train.append(features)
y_train.append(labels)
# stack x,y pairs of all training folds
x_train = np.concatenate(x_train, axis = 0).astype(np.float32)
y_train = np.concatenate(y_train, axis = 0).astype(np.float32)
# for testing we will make predictions on each segment and average them to
# produce single label for an entire sound clip.
test_data = np.load("{0}/{1}.npz".format(load_dir,
folds[test_index][0]), allow_pickle=True)
x_test = test_data["features"]
y_test = test_data["labels"]
log_dir="logs/fit/" + folds[test_index][0]
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
model = get_network()
model.fit(x_train, y_train, epochs = 20, batch_size = 64, verbose = 1, validation_split=0.2,
use_multiprocessing=True, workers=8, callbacks=[tensorboard_callback])
# evaluate on test set/fold
y_true, y_pred = [], []
for x, y in zip(x_test, y_test):
# average predictions over segments of a sound clip
avg_p = np.argmax(np.mean(model.predict(x), axis = 0))
y_pred.append(avg_p)
# pick single label via np.unique for a sound clip
y_true.append(np.unique(y)[0])
accuracies.append(accuracy_score(y_true, y_pred))
print("Fold n accuracy: {0}".format(accuracy_score(y_true, y_pred)))
cm = ConfusionMatrixDisplay.from_predictions(y_true, y_pred)
cm.figure_.savefig('conf_mat_' + str(test_index) + '_acc_' + str(accuracy_score(y_true, y_pred)) + '.png',dpi=1000)
print("Average 10 Folds Accuracy: {0}".format(np.mean(accuracies)))
| 2.515625 | 3 |
Python3/1436-Destination-City/soln.py | wyaadarsh/LeetCode-Solutions | 5 | 6794 | <filename>Python3/1436-Destination-City/soln.py
class Solution:
def destCity(self, paths: List[List[str]]) -> str:
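        # A path u -> v marks u as a source ("bad") city; the destination is
        # the only city that never appears as a source.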
bads = set()
cities = set()
for u, v in paths:
cities.add(u)
cities.add(v)
bads.add(u)
ans = cities - bads
return list(ans)[0]
| 2.859375 | 3 |
metaflow/plugins/kfp/tests/flows/resources_flow.py | zillow/metaflow | 7 | 6795 | import os
import pprint
import subprocess
import time
from typing import Dict, List
from kubernetes.client import (
V1EnvVar,
V1EnvVarSource,
V1ObjectFieldSelector,
V1ResourceFieldSelector,
)
from metaflow import FlowSpec, step, environment, resources, current
def get_env_vars(env_resources: Dict[str, str]) -> List[V1EnvVar]:
res = []
for name, resource in env_resources.items():
res.append(
V1EnvVar(
                # expose the container's resource requests/limits through the
                # Kubernetes downward API
                name=name,
value_from=V1EnvVarSource(
resource_field_ref=V1ResourceFieldSelector(
container_name="main",
resource=resource,
divisor="1m" if "cpu" in resource else "1",
)
),
)
)
return res
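# e.g. get_env_vars({"CPU": "requests.cpu"}) yields an env var whose value the
# downward API resolves from the container's CPU request, in millicores.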
kubernetes_vars = get_env_vars(
{
"LOCAL_STORAGE": "requests.ephemeral-storage",
"LOCAL_STORAGE_LIMIT": "limits.ephemeral-storage",
"CPU": "requests.cpu",
"CPU_LIMIT": "limits.cpu",
"MEMORY": "requests.memory",
"MEMORY_LIMIT": "limits.memory",
}
)
kubernetes_vars.append(
V1EnvVar(
name="MY_POD_NAME",
value_from=V1EnvVarSource(
field_ref=V1ObjectFieldSelector(field_path="metadata.name")
),
)
)
annotations = {
"metaflow.org/flow_name": "MF_NAME",
"metaflow.org/step": "MF_STEP",
"metaflow.org/run_id": "MF_RUN_ID",
"metaflow.org/experiment": "MF_EXPERIMENT",
"metaflow.org/tag_metaflow_test": "MF_TAG_METAFLOW_TEST",
"metaflow.org/tag_test_t1": "MF_TAG_TEST_T1",
}
for annotation, env_name in annotations.items():
kubernetes_vars.append(
V1EnvVar(
name=env_name,
value_from=V1EnvVarSource(
field_ref=V1ObjectFieldSelector(
field_path=f"metadata.annotations['{annotation}']"
)
),
)
)
labels = {
"aip.zillowgroup.net/kfp-pod-default": "KF_POD_DEFAULT",
"tags.ledger.zgtools.net/ai-flow-name": "AI_FLOW_NAME",
"tags.ledger.zgtools.net/ai-step-name": "AI_STEP_NAME",
"tags.ledger.zgtools.net/ai-experiment-name": "AI_EXPERIMENT_NAME",
}
for label, env_name in labels.items():
kubernetes_vars.append(
V1EnvVar(
name=env_name,
value_from=V1EnvVarSource(
field_ref=V1ObjectFieldSelector(
field_path=f"metadata.labels['{label}']"
)
),
)
)
class ResourcesFlow(FlowSpec):
@resources(
local_storage="242",
cpu="0.6",
memory="1G",
)
@environment( # pylint: disable=E1102
vars={"MY_ENV": "value"}, kubernetes_vars=kubernetes_vars
)
@step
def start(self):
pprint.pprint(dict(os.environ))
print("=====")
# test simple environment var
assert os.environ.get("MY_ENV") == "value"
# test kubernetes_vars
assert "resourcesflow" in os.environ.get("MY_POD_NAME")
assert os.environ.get("CPU") == "600"
assert os.environ.get("CPU_LIMIT") == "600"
assert os.environ.get("LOCAL_STORAGE") == "242000000"
assert os.environ.get("LOCAL_STORAGE_LIMIT") == "242000000"
assert os.environ.get("MEMORY") == "1000000000"
assert os.environ.get("MEMORY_LIMIT") == "1000000000"
assert os.environ.get("MF_NAME") == current.flow_name
assert os.environ.get("MF_STEP") == current.step_name
assert os.environ.get("MF_RUN_ID") == current.run_id
assert os.environ.get("MF_EXPERIMENT") == "metaflow_test"
assert os.environ.get("MF_TAG_METAFLOW_TEST") == "true"
assert os.environ.get("MF_TAG_TEST_T1") == "true"
assert os.environ.get("KF_POD_DEFAULT") == "true"
assert os.environ.get("AI_FLOW_NAME") == current.flow_name
assert os.environ.get("AI_STEP_NAME") == current.step_name
assert os.environ.get("AI_EXPERIMENT_NAME") == "metaflow_test"
self.items = [1, 2]
self.next(self.foreach_step, foreach="items")
@environment(vars={"MY_ENV": "value"}) # pylint: disable=E1102
@resources(volume="11G")
@step
def foreach_step(self):
# test simple environment var
assert os.environ.get("MY_ENV") == "value"
output = subprocess.check_output(
"df -h | grep /opt/metaflow_volume", shell=True
)
assert "11G" in str(output)
self.next(self.join_step)
@resources(volume="12G")
@step
def join_step(self, inputs):
output = subprocess.check_output(
"df -h | grep /opt/metaflow_volume", shell=True
)
assert "12G" in str(output)
self.next(self.split_step)
@step
def split_step(self):
self.items = [1, 2]
self.next(self.shared_volume_foreach_step, foreach="items")
@resources(volume="13G", volume_mode="ReadWriteMany")
@step
def shared_volume_foreach_step(self):
output = subprocess.check_output(
"df -h | grep /opt/metaflow_volume", shell=True
)
assert "13G" in str(output)
file_path = "/opt/metaflow_volume/test.txt"
message = "hello world!"
# validate the volume is shared across the foreach splits
if self.input == 1:
with open(file_path, "w") as f:
f.write(message)
else:
while not os.path.exists(file_path):
time.sleep(1)
print(".")
with open(file_path, "r") as f:
read_lines = f.readlines()
print("read_lines", read_lines)
assert message == read_lines[0]
self.next(self.shared_volume_join_step)
@step
def shared_volume_join_step(self, inputs):
self.next(self.end)
@step
def end(self):
print("All done.")
if __name__ == "__main__":
ResourcesFlow()
| 2.234375 | 2 |
src/nb_utils/general.py | redfrexx/osm_association_rules | 0 | 6796 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions used for data handling
"""
__author__ = "<NAME>, GIScience Research Group, Heidelberg University"
__email__ = "<EMAIL>"
import os
import yaml
from shapely.geometry import box
import numpy as np
import pandas as pd
import geopandas as gpd
import json
from nb_utils.utils import create_bbox, reproject_to_utm
CONTEXT_NAMES = {"area": "Area", "building_density": "Building density", "age": "Days since creation",
"n_tags": "Number of tags", "changes": "Number of changes", "max_version": "Version number",
"user_count_inner": "Inner user count", "user_density_inner": "Inner user density",
"user_count_outer": "Outer user count", "user_density_outer": "Outer user density",
"feature_count": "Feature count", "random": "Random"}
rules_colnames = ['antecedents', 'consequents', 'antecedent support',
'consequent support', 'support', 'confidence', 'lift', 'leverage',
'conviction', "context", "context_min", "context_max", "context_p_min", "context_p_max", "nfeatures", "rule"]
pretty_names_units = {"area": "Area [ha]", "building_density": "Building density", "feature_count": "Feature count", "age": "Days since creation", "n_tags": "Number of tags", "changes": "Number of changes", "max_version": "Version number", "user_count_inner": "Inner user count", "user_density_inner": "Inner user density", "user_count_outer": "Outer user count",
"user_density_outer": "Outer user density", "random": "Random"}
def load_config(config_file, cities):
"""
Load config parameters from file
    :param config_file: path to the YAML configuration file
    :param cities: iterable of city names to keep
    :return: dict mapping each requested city to its config entry
"""
if not os.path.exists(config_file):
print("ERROR: Config file {} does not exist.".format(config_file))
else:
with open(config_file, 'r') as src:
config = yaml.load(src, Loader=yaml.FullLoader)
config_cities = config["locations"]
config_cities = {city: config_cities[city] for city in cities}
return config_cities
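# Example usage (hypothetical file name): load_config("config.yaml", ["Berlin"])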
def load_data(cities, data_dir):
"""
Load data into notebook from file
:return:
"""
loaded_tags_dfs = []
loaded_context_dfs = []
for city in cities:
print("Loading {}...".format(city))
# Check paths
tags_file = os.path.join(data_dir, city, "{}_tags.json".format(city))
context_file = os.path.join(data_dir, city, "{}_context.geojson".format(city))
if (not os.path.exists(tags_file)) or (not os.path.exists(context_file)):
print("{}: Input files not found.".format(city))
return None, None, None
# Read data and set index
tags_df = pd.read_json(tags_file).set_index("@osmId")
context_df = gpd.read_file(context_file).set_index("@osmId")
# Calculate area (should be moved to data_extraction)
context_df["area"] = reproject_to_utm(context_df).area #/ 10000. # conversion to ha
# Add column holding the city name
context_df["city"] = city
loaded_tags_dfs.append(tags_df)
loaded_context_dfs.append(context_df)
# Convert list of dataframes to dataframe
all_tags_df = pd.concat(loaded_tags_dfs, axis=0)
all_tags_df = all_tags_df.fillna(False)
all_context_df = pd.concat(loaded_context_dfs, axis=0)
all_features = all_context_df.join(all_tags_df, sort=False)
# Add dummy columns for "no antecedent" and random context variable
all_features["none"] = True
all_features["random"] = np.random.rand(len(all_features))
# The park iteself is always counted as an objects inside of it. Therefore, subtract 1.
all_features["feature_count"] = all_features["feature_count"] - 1
# Delete unnecessary columns
unnecessary_cols = list(filter(lambda x: x.startswith("gt:"), all_features.columns)) + ["leisure=park"]
all_features.drop(unnecessary_cols, axis=1, inplace=True)
return all_features
def create_city_bboxes(config_cities):
"""
    Create bounding boxes of the cities
:return:
"""
bboxes = {c: box(*create_bbox(config_cities[c]["center"], config_cities[c]["width"])) for c in config_cities.keys()}
bbox_df = pd.DataFrame().from_dict(bboxes, orient="index", columns=["geometry"])
return gpd.GeoDataFrame(bbox_df)
def dump_city_rules(city_rules, interim_dir):
"""
Write results from context based association rule analysis to file
:param city_rules:
:param interim_dir:
:return:
"""
city_rules_dir = os.path.join(interim_dir, "city_rules")
if not os.path.exists(city_rules_dir):
os.mkdir(city_rules_dir)
for k, v in city_rules.items():
print(k)
v["heatmap"].to_json(os.path.join(city_rules_dir, "{}_heatmap.json".format(k)))
v["valid_rules"].reset_index().to_json(os.path.join(city_rules_dir, "{}_valid_rules.json".format(k)))
with open(os.path.join(city_rules_dir, "{}_sel_features.json".format(k)), "w") as dst:
json.dump(list(v["sel_features"].index), dst)
def load_city_rules(cities, interim_dir, all_features):
"""
Load results from context based association rule analysis to file
:param cities:
:param interim_dir:
:param all_features:
:return:
"""
city_rules = {}
for city in cities:
with open(os.path.join(interim_dir, "city_rules", "{}_sel_features.json".format(city))) as dst:
selected_ids = json.load(dst)
sel_features = all_features.loc[selected_ids]
city_rules[city] = {
"heatmap": pd.read_json(os.path.join(interim_dir, "city_rules", "{}_heatmap.json".format(city))),
"valid_rules": pd.read_json(
os.path.join(interim_dir, "city_rules", "{}_valid_rules.json".format(city))).set_index("index"),
"sel_features": sel_features}
return city_rules
| 2.296875 | 2 |
keystoneclient/auth/identity/v3/federated.py | darren-wang/ksc | 1 | 6797 | <filename>keystoneclient/auth/identity/v3/federated.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from oslo_config import cfg
import six
from keystoneclient.auth.identity.v3 import base
from keystoneclient.auth.identity.v3 import token
__all__ = ['FederatedBaseAuth']
@six.add_metaclass(abc.ABCMeta)
class FederatedBaseAuth(base.BaseAuth):
rescoping_plugin = token.Token
def __init__(self, auth_url, identity_provider, protocol, **kwargs):
"""Class constructor accepting following parameters:
:param auth_url: URL of the Identity Service
:type auth_url: string
:param identity_provider: name of the Identity Provider the client
will authenticate against. This parameter
will be used to build a dynamic URL used to
obtain unscoped OpenStack token.
:type identity_provider: string
"""
super(FederatedBaseAuth, self).__init__(auth_url=auth_url, **kwargs)
self.identity_provider = identity_provider
self.protocol = protocol
@classmethod
def get_options(cls):
options = super(FederatedBaseAuth, cls).get_options()
options.extend([
cfg.StrOpt('identity-provider',
help="Identity Provider's name"),
cfg.StrOpt('protocol',
help='Protocol for federated plugin'),
])
return options
@property
def federated_token_url(self):
"""Full URL where authorization data is sent."""
values = {
'host': self.auth_url.rstrip('/'),
'identity_provider': self.identity_provider,
'protocol': self.protocol
}
url = ("%(host)s/OS-FEDERATION/identity_providers/"
"%(identity_provider)s/protocols/%(protocol)s/auth")
url = url % values
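        # e.g. https://keystone.example.com/v3/OS-FEDERATION/identity_providers/
        #      myidp/protocols/saml2/auth (hypothetical host and IdP names)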
return url
def _get_scoping_data(self):
return {'trust_id': self.trust_id,
'domain_id': self.domain_id,
'domain_name': self.domain_name,
'project_id': self.project_id,
'project_name': self.project_name,
'project_domain_id': self.project_domain_id,
'project_domain_name': self.project_domain_name}
def get_auth_ref(self, session, **kwargs):
"""Authenticate retrieve token information.
This is a multi-step process where a client does federated authn
receives an unscoped token.
If an unscoped token is successfully received and scoping information
is present then the token is rescoped to that target.
:param session: a session object to send out HTTP requests.
:type session: keystoneclient.session.Session
:returns: a token data representation
:rtype: :py:class:`keystoneclient.access.AccessInfo`
"""
auth_ref = self.get_unscoped_auth_ref(session)
scoping = self._get_scoping_data()
if any(scoping.values()):
token_plugin = self.rescoping_plugin(self.auth_url,
token=auth_ref.auth_token,
**scoping)
auth_ref = token_plugin.get_auth_ref(session)
return auth_ref
@abc.abstractmethod
def get_unscoped_auth_ref(self, session, **kwargs):
"""Fetch unscoped federated token."""
| 1.96875 | 2 |
bin/Python27/Lib/site-packages/tables/utilsExtension.py | lefevre-fraser/openmeta-mms | 0 | 6798 | from warnings import warn
from tables.utilsextension import *
_warnmsg = ("utilsExtension is pending deprecation, import utilsextension instead. "
"You may use the pt2to3 tool to update your source code.")
warn(_warnmsg, DeprecationWarning, stacklevel=2)
| 1.304688 | 1 |
config.py | iDevHank/i18n | 0 | 6799 | <gh_stars>0
#!/usr/bin/env python3
# The format of your own localizable method.
# This is an example of '"string".localized'
SUFFIX = '.localized'
KEY = r'"(?:\\.|[^"\\])*"'
LOCALIZABLE_RE = r'%s%s' % (KEY, SUFFIX)
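# e.g. LOCALIZABLE_RE matches '"Hello, world!".localized'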
# Specify the path of localizable files in project.
LOCALIZABLE_FILE_PATH = ''
LOCALIZABLE_FILE_NAMES = ['Localizable']
LOCALIZABLE_FILE_TYPES = ['strings']
# File types of source file.
SEARCH_TYPES = ['swift', 'm', 'json']
SOURCE_FILE_EXCLUSIVE_PATHS = [
'Assets.xcassets', 'Carthage', 'ThirdParty',
'Pods', 'Media.xcassets', 'Framework', 'bin']
LOCALIZABLE_FILE_EXCLUSIVE_PATHS = ['Carthage', 'ThirdParty',
'Pods', 'Framework', 'bin']
LOCALIZABLE_FORMAT_RE = r'"(?:\\.|[^"\\])*"\s*=\s*"(?:\\.|[^"\\])*";\n'
DEFAULT_TARGET_PATH = 'generated.strings'
| 2.09375 | 2 |