index
int64 0
731k
| package
stringlengths 2
98
⌀ | name
stringlengths 1
76
| docstring
stringlengths 0
281k
⌀ | code
stringlengths 4
1.07M
⌀ | signature
stringlengths 2
42.8k
⌀ |
---|---|---|---|---|---|
6,942 | growthbook | __init__ | null | def __init__(self, value: Dict, ttl: int) -> None:
self.value = value
self.ttl = ttl
self.expires = time() + ttl
| (self, value: Dict, ttl: int) -> NoneType |
6,943 | growthbook | update | null | def update(self, value: Dict):
self.value = value
self.expires = time() + self.ttl
| (self, value: Dict) |
6,944 | cryptography.hazmat.primitives.ciphers.base | Cipher | null | class Cipher(typing.Generic[Mode]):
def __init__(
self,
algorithm: CipherAlgorithm,
mode: Mode,
backend: typing.Any = None,
) -> None:
if not isinstance(algorithm, CipherAlgorithm):
raise TypeError("Expected interface of CipherAlgorithm.")
if mode is not None:
# mypy needs this assert to narrow the type from our generic
# type. Maybe it won't some time in the future.
assert isinstance(mode, modes.Mode)
mode.validate_for_algorithm(algorithm)
self.algorithm = algorithm
self.mode = mode
@typing.overload
def encryptor(
self: Cipher[modes.ModeWithAuthenticationTag],
) -> AEADEncryptionContext:
...
@typing.overload
def encryptor(
self: _CIPHER_TYPE,
) -> CipherContext:
...
def encryptor(self):
if isinstance(self.mode, modes.ModeWithAuthenticationTag):
if self.mode.tag is not None:
raise ValueError(
"Authentication tag must be None when encrypting."
)
from cryptography.hazmat.backends.openssl.backend import backend
ctx = backend.create_symmetric_encryption_ctx(
self.algorithm, self.mode
)
return self._wrap_ctx(ctx, encrypt=True)
@typing.overload
def decryptor(
self: Cipher[modes.ModeWithAuthenticationTag],
) -> AEADDecryptionContext:
...
@typing.overload
def decryptor(
self: _CIPHER_TYPE,
) -> CipherContext:
...
def decryptor(self):
from cryptography.hazmat.backends.openssl.backend import backend
ctx = backend.create_symmetric_decryption_ctx(
self.algorithm, self.mode
)
return self._wrap_ctx(ctx, encrypt=False)
def _wrap_ctx(
self, ctx: _BackendCipherContext, encrypt: bool
) -> AEADEncryptionContext | AEADDecryptionContext | CipherContext:
if isinstance(self.mode, modes.ModeWithAuthenticationTag):
if encrypt:
return _AEADEncryptionContext(ctx)
else:
return _AEADDecryptionContext(ctx)
else:
return _CipherContext(ctx)
| (algorithm: 'CipherAlgorithm', mode: 'Mode', backend: 'typing.Any' = None) -> 'None' |
6,945 | cryptography.hazmat.primitives.ciphers.base | __init__ | null | def __init__(
self,
algorithm: CipherAlgorithm,
mode: Mode,
backend: typing.Any = None,
) -> None:
if not isinstance(algorithm, CipherAlgorithm):
raise TypeError("Expected interface of CipherAlgorithm.")
if mode is not None:
# mypy needs this assert to narrow the type from our generic
# type. Maybe it won't some time in the future.
assert isinstance(mode, modes.Mode)
mode.validate_for_algorithm(algorithm)
self.algorithm = algorithm
self.mode = mode
| (self, algorithm: cryptography.hazmat.primitives._cipheralgorithm.CipherAlgorithm, mode: +Mode, backend: Optional[Any] = None) -> NoneType |
6,946 | cryptography.hazmat.primitives.ciphers.base | _wrap_ctx | null | def _wrap_ctx(
self, ctx: _BackendCipherContext, encrypt: bool
) -> AEADEncryptionContext | AEADDecryptionContext | CipherContext:
if isinstance(self.mode, modes.ModeWithAuthenticationTag):
if encrypt:
return _AEADEncryptionContext(ctx)
else:
return _AEADDecryptionContext(ctx)
else:
return _CipherContext(ctx)
| (self, ctx: '_BackendCipherContext', encrypt: 'bool') -> 'AEADEncryptionContext | AEADDecryptionContext | CipherContext' |
6,947 | cryptography.hazmat.primitives.ciphers.base | decryptor | null | def decryptor(self):
from cryptography.hazmat.backends.openssl.backend import backend
ctx = backend.create_symmetric_decryption_ctx(
self.algorithm, self.mode
)
return self._wrap_ctx(ctx, encrypt=False)
| (self) |
6,948 | cryptography.hazmat.primitives.ciphers.base | encryptor | null | def encryptor(self):
if isinstance(self.mode, modes.ModeWithAuthenticationTag):
if self.mode.tag is not None:
raise ValueError(
"Authentication tag must be None when encrypting."
)
from cryptography.hazmat.backends.openssl.backend import backend
ctx = backend.create_symmetric_encryption_ctx(
self.algorithm, self.mode
)
return self._wrap_ctx(ctx, encrypt=True)
| (self) |
6,949 | growthbook | Experiment | null | class Experiment(object):
def __init__(
self,
key: str,
variations: list,
weights: List[float] = None,
active: bool = True,
status: str = "running",
coverage: int = None,
condition: dict = None,
namespace: Tuple[str, float, float] = None,
url: str = "",
include=None,
groups: list = None,
force: int = None,
hashAttribute: str = "id",
fallbackAttribute: str = None,
hashVersion: int = None,
ranges: List[Tuple[float, float]] = None,
meta: List[VariationMeta] = None,
filters: List[Filter] = None,
seed: str = None,
name: str = None,
phase: str = None,
disableStickyBucketing: bool = False,
bucketVersion: int = None,
minBucketVersion: int = None,
parentConditions: List[dict] = None,
) -> None:
self.key = key
self.variations = variations
self.weights = weights
self.active = active
self.coverage = coverage
self.condition = condition
self.namespace = namespace
self.force = force
self.hashAttribute = hashAttribute
self.hashVersion = hashVersion or 1
self.ranges = ranges
self.meta = meta
self.filters = filters
self.seed = seed
self.name = name
self.phase = phase
self.disableStickyBucketing = disableStickyBucketing
self.bucketVersion = bucketVersion or 0
self.minBucketVersion = minBucketVersion or 0
self.parentConditions = parentConditions
self.fallbackAttribute = None
if not self.disableStickyBucketing:
self.fallbackAttribute = fallbackAttribute
# Deprecated properties
self.status = status
self.url = url
self.include = include
self.groups = groups
def to_dict(self):
obj = {
"key": self.key,
"variations": self.variations,
"weights": self.weights,
"active": self.active,
"coverage": self.coverage or 1,
"condition": self.condition,
"namespace": self.namespace,
"force": self.force,
"hashAttribute": self.hashAttribute,
"hashVersion": self.hashVersion,
"ranges": self.ranges,
"meta": self.meta,
"filters": self.filters,
"seed": self.seed,
"name": self.name,
"phase": self.phase,
}
if self.fallbackAttribute:
obj["fallbackAttribute"] = self.fallbackAttribute
if self.disableStickyBucketing:
obj["disableStickyBucketing"] = True
if self.bucketVersion:
obj["bucketVersion"] = self.bucketVersion
if self.minBucketVersion:
obj["minBucketVersion"] = self.minBucketVersion
if self.parentConditions:
obj["parentConditions"] = self.parentConditions
return obj
def update(self, data: dict) -> None:
weights = data.get("weights", None)
status = data.get("status", None)
coverage = data.get("coverage", None)
url = data.get("url", None)
groups = data.get("groups", None)
force = data.get("force", None)
if weights is not None:
self.weights = weights
if status is not None:
self.status = status
if coverage is not None:
self.coverage = coverage
if url is not None:
self.url = url
if groups is not None:
self.groups = groups
if force is not None:
self.force = force
| (key: str, variations: list, weights: List[float] = None, active: bool = True, status: str = 'running', coverage: int = None, condition: dict = None, namespace: Tuple[str, float, float] = None, url: str = '', include=None, groups: list = None, force: int = None, hashAttribute: str = 'id', fallbackAttribute: str = None, hashVersion: int = None, ranges: List[Tuple[float, float]] = None, meta: List[growthbook.VariationMeta] = None, filters: List[growthbook.Filter] = None, seed: str = None, name: str = None, phase: str = None, disableStickyBucketing: bool = False, bucketVersion: int = None, minBucketVersion: int = None, parentConditions: List[dict] = None) -> None |
6,950 | growthbook | __init__ | null | def __init__(
self,
key: str,
variations: list,
weights: List[float] = None,
active: bool = True,
status: str = "running",
coverage: int = None,
condition: dict = None,
namespace: Tuple[str, float, float] = None,
url: str = "",
include=None,
groups: list = None,
force: int = None,
hashAttribute: str = "id",
fallbackAttribute: str = None,
hashVersion: int = None,
ranges: List[Tuple[float, float]] = None,
meta: List[VariationMeta] = None,
filters: List[Filter] = None,
seed: str = None,
name: str = None,
phase: str = None,
disableStickyBucketing: bool = False,
bucketVersion: int = None,
minBucketVersion: int = None,
parentConditions: List[dict] = None,
) -> None:
self.key = key
self.variations = variations
self.weights = weights
self.active = active
self.coverage = coverage
self.condition = condition
self.namespace = namespace
self.force = force
self.hashAttribute = hashAttribute
self.hashVersion = hashVersion or 1
self.ranges = ranges
self.meta = meta
self.filters = filters
self.seed = seed
self.name = name
self.phase = phase
self.disableStickyBucketing = disableStickyBucketing
self.bucketVersion = bucketVersion or 0
self.minBucketVersion = minBucketVersion or 0
self.parentConditions = parentConditions
self.fallbackAttribute = None
if not self.disableStickyBucketing:
self.fallbackAttribute = fallbackAttribute
# Deprecated properties
self.status = status
self.url = url
self.include = include
self.groups = groups
| (self, key: str, variations: list, weights: Optional[List[float]] = None, active: bool = True, status: str = 'running', coverage: Optional[int] = None, condition: Optional[dict] = None, namespace: Optional[Tuple[str, float, float]] = None, url: str = '', include=None, groups: Optional[list] = None, force: Optional[int] = None, hashAttribute: str = 'id', fallbackAttribute: Optional[str] = None, hashVersion: Optional[int] = None, ranges: Optional[List[Tuple[float, float]]] = None, meta: Optional[List[growthbook.VariationMeta]] = None, filters: Optional[List[growthbook.Filter]] = None, seed: Optional[str] = None, name: Optional[str] = None, phase: Optional[str] = None, disableStickyBucketing: bool = False, bucketVersion: Optional[int] = None, minBucketVersion: Optional[int] = None, parentConditions: Optional[List[dict]] = None) -> NoneType |
6,951 | growthbook | to_dict | null | def to_dict(self):
obj = {
"key": self.key,
"variations": self.variations,
"weights": self.weights,
"active": self.active,
"coverage": self.coverage or 1,
"condition": self.condition,
"namespace": self.namespace,
"force": self.force,
"hashAttribute": self.hashAttribute,
"hashVersion": self.hashVersion,
"ranges": self.ranges,
"meta": self.meta,
"filters": self.filters,
"seed": self.seed,
"name": self.name,
"phase": self.phase,
}
if self.fallbackAttribute:
obj["fallbackAttribute"] = self.fallbackAttribute
if self.disableStickyBucketing:
obj["disableStickyBucketing"] = True
if self.bucketVersion:
obj["bucketVersion"] = self.bucketVersion
if self.minBucketVersion:
obj["minBucketVersion"] = self.minBucketVersion
if self.parentConditions:
obj["parentConditions"] = self.parentConditions
return obj
| (self) |
6,952 | growthbook | update | null | def update(self, data: dict) -> None:
weights = data.get("weights", None)
status = data.get("status", None)
coverage = data.get("coverage", None)
url = data.get("url", None)
groups = data.get("groups", None)
force = data.get("force", None)
if weights is not None:
self.weights = weights
if status is not None:
self.status = status
if coverage is not None:
self.coverage = coverage
if url is not None:
self.url = url
if groups is not None:
self.groups = groups
if force is not None:
self.force = force
| (self, data: dict) -> NoneType |
6,953 | growthbook | Feature | null | class Feature(object):
def __init__(self, defaultValue=None, rules: list = []) -> None:
self.defaultValue = defaultValue
self.rules: List[FeatureRule] = []
for rule in rules:
if isinstance(rule, FeatureRule):
self.rules.append(rule)
else:
self.rules.append(FeatureRule(
id=rule.get("id", None),
key=rule.get("key", ""),
variations=rule.get("variations", None),
weights=rule.get("weights", None),
coverage=rule.get("coverage", None),
condition=rule.get("condition", None),
namespace=rule.get("namespace", None),
force=rule.get("force", None),
hashAttribute=rule.get("hashAttribute", "id"),
fallbackAttribute=rule.get("fallbackAttribute", None),
hashVersion=rule.get("hashVersion", None),
range=rule.get("range", None),
ranges=rule.get("ranges", None),
meta=rule.get("meta", None),
filters=rule.get("filters", None),
seed=rule.get("seed", None),
name=rule.get("name", None),
phase=rule.get("phase", None),
disableStickyBucketing=rule.get("disableStickyBucketing", False),
bucketVersion=rule.get("bucketVersion", None),
minBucketVersion=rule.get("minBucketVersion", None),
parentConditions=rule.get("parentConditions", None),
))
def to_dict(self) -> dict:
return {
"defaultValue": self.defaultValue,
"rules": [rule.to_dict() for rule in self.rules],
}
| (defaultValue=None, rules: list = []) -> None |
6,954 | growthbook | __init__ | null | def __init__(self, defaultValue=None, rules: list = []) -> None:
self.defaultValue = defaultValue
self.rules: List[FeatureRule] = []
for rule in rules:
if isinstance(rule, FeatureRule):
self.rules.append(rule)
else:
self.rules.append(FeatureRule(
id=rule.get("id", None),
key=rule.get("key", ""),
variations=rule.get("variations", None),
weights=rule.get("weights", None),
coverage=rule.get("coverage", None),
condition=rule.get("condition", None),
namespace=rule.get("namespace", None),
force=rule.get("force", None),
hashAttribute=rule.get("hashAttribute", "id"),
fallbackAttribute=rule.get("fallbackAttribute", None),
hashVersion=rule.get("hashVersion", None),
range=rule.get("range", None),
ranges=rule.get("ranges", None),
meta=rule.get("meta", None),
filters=rule.get("filters", None),
seed=rule.get("seed", None),
name=rule.get("name", None),
phase=rule.get("phase", None),
disableStickyBucketing=rule.get("disableStickyBucketing", False),
bucketVersion=rule.get("bucketVersion", None),
minBucketVersion=rule.get("minBucketVersion", None),
parentConditions=rule.get("parentConditions", None),
))
| (self, defaultValue=None, rules: list = []) -> NoneType |
6,955 | growthbook | to_dict | null | def to_dict(self) -> dict:
return {
"defaultValue": self.defaultValue,
"rules": [rule.to_dict() for rule in self.rules],
}
| (self) -> dict |
6,956 | growthbook | FeatureRepository | null | class FeatureRepository(object):
def __init__(self) -> None:
self.cache: AbstractFeatureCache = InMemoryFeatureCache()
self.http: Optional[PoolManager] = None
def set_cache(self, cache: AbstractFeatureCache) -> None:
self.cache = cache
def clear_cache(self):
self.cache.clear()
# Loads features with an in-memory cache in front
def load_features(
self, api_host: str, client_key: str, decryption_key: str = "", ttl: int = 60
) -> Optional[Dict]:
key = api_host + "::" + client_key
cached = self.cache.get(key)
if not cached:
res = self._fetch_features(api_host, client_key, decryption_key)
if res is not None:
self.cache.set(key, res, ttl)
logger.debug("Fetched features from API, stored in cache")
return res
return cached
# Perform the GET request (separate method for easy mocking)
def _get(self, url: str):
self.http = self.http or PoolManager()
return self.http.request("GET", url)
def _fetch_and_decode(self, api_host: str, client_key: str) -> Optional[Dict]:
try:
r = self._get(self._get_features_url(api_host, client_key))
if r.status >= 400:
logger.warning(
"Failed to fetch features, received status code %d", r.status
)
return None
decoded = json.loads(r.data.decode("utf-8"))
return decoded
except Exception:
logger.warning("Failed to decode feature JSON from GrowthBook API")
return None
# Fetch features from the GrowthBook API
def _fetch_features(
self, api_host: str, client_key: str, decryption_key: str = ""
) -> Optional[Dict]:
decoded = self._fetch_and_decode(api_host, client_key)
if not decoded:
return None
if "encryptedFeatures" in decoded:
if not decryption_key:
raise ValueError("Must specify decryption_key")
try:
decrypted = decrypt(decoded["encryptedFeatures"], decryption_key)
return json.loads(decrypted)
except Exception:
logger.warning(
"Failed to decrypt features from GrowthBook API response"
)
return None
elif "features" in decoded:
return decoded["features"]
else:
logger.warning("GrowthBook API response missing features")
return None
@staticmethod
def _get_features_url(api_host: str, client_key: str) -> str:
api_host = (api_host or "https://cdn.growthbook.io").rstrip("/")
return api_host + "/api/features/" + client_key
| () -> None |
6,957 | growthbook | __init__ | null | def __init__(self) -> None:
self.cache: AbstractFeatureCache = InMemoryFeatureCache()
self.http: Optional[PoolManager] = None
| (self) -> NoneType |
6,958 | growthbook | _fetch_and_decode | null | def _fetch_and_decode(self, api_host: str, client_key: str) -> Optional[Dict]:
try:
r = self._get(self._get_features_url(api_host, client_key))
if r.status >= 400:
logger.warning(
"Failed to fetch features, received status code %d", r.status
)
return None
decoded = json.loads(r.data.decode("utf-8"))
return decoded
except Exception:
logger.warning("Failed to decode feature JSON from GrowthBook API")
return None
| (self, api_host: str, client_key: str) -> Optional[Dict] |
6,959 | growthbook | _fetch_features | null | def _fetch_features(
self, api_host: str, client_key: str, decryption_key: str = ""
) -> Optional[Dict]:
decoded = self._fetch_and_decode(api_host, client_key)
if not decoded:
return None
if "encryptedFeatures" in decoded:
if not decryption_key:
raise ValueError("Must specify decryption_key")
try:
decrypted = decrypt(decoded["encryptedFeatures"], decryption_key)
return json.loads(decrypted)
except Exception:
logger.warning(
"Failed to decrypt features from GrowthBook API response"
)
return None
elif "features" in decoded:
return decoded["features"]
else:
logger.warning("GrowthBook API response missing features")
return None
| (self, api_host: str, client_key: str, decryption_key: str = '') -> Optional[Dict] |
6,960 | growthbook | _get | null | def _get(self, url: str):
self.http = self.http or PoolManager()
return self.http.request("GET", url)
| (self, url: str) |
6,961 | growthbook | _get_features_url | null | @staticmethod
def _get_features_url(api_host: str, client_key: str) -> str:
api_host = (api_host or "https://cdn.growthbook.io").rstrip("/")
return api_host + "/api/features/" + client_key
| (api_host: str, client_key: str) -> str |
6,962 | growthbook | clear_cache | null | def clear_cache(self):
self.cache.clear()
| (self) |
6,963 | growthbook | load_features | null | def load_features(
self, api_host: str, client_key: str, decryption_key: str = "", ttl: int = 60
) -> Optional[Dict]:
key = api_host + "::" + client_key
cached = self.cache.get(key)
if not cached:
res = self._fetch_features(api_host, client_key, decryption_key)
if res is not None:
self.cache.set(key, res, ttl)
logger.debug("Fetched features from API, stored in cache")
return res
return cached
| (self, api_host: str, client_key: str, decryption_key: str = '', ttl: int = 60) -> Optional[Dict] |
6,964 | growthbook | set_cache | null | def set_cache(self, cache: AbstractFeatureCache) -> None:
self.cache = cache
| (self, cache: growthbook.AbstractFeatureCache) -> NoneType |
6,965 | growthbook | FeatureResult | null | class FeatureResult(object):
def __init__(
self,
value,
source: str,
experiment: Experiment = None,
experimentResult: Result = None,
ruleId: str = None,
) -> None:
self.value = value
self.source = source
self.ruleId = ruleId
self.experiment = experiment
self.experimentResult = experimentResult
self.on = bool(value)
self.off = not bool(value)
def to_dict(self) -> dict:
data = {
"value": self.value,
"source": self.source,
"on": self.on,
"off": self.off,
}
if self.ruleId:
data["ruleId"] = self.ruleId
if self.experiment:
data["experiment"] = self.experiment.to_dict()
if self.experimentResult:
data["experimentResult"] = self.experimentResult.to_dict()
return data
| (value, source: str, experiment: growthbook.Experiment = None, experimentResult: growthbook.Result = None, ruleId: str = None) -> None |
6,966 | growthbook | __init__ | null | def __init__(
self,
value,
source: str,
experiment: Experiment = None,
experimentResult: Result = None,
ruleId: str = None,
) -> None:
self.value = value
self.source = source
self.ruleId = ruleId
self.experiment = experiment
self.experimentResult = experimentResult
self.on = bool(value)
self.off = not bool(value)
| (self, value, source: str, experiment: Optional[growthbook.Experiment] = None, experimentResult: Optional[growthbook.Result] = None, ruleId: Optional[str] = None) -> NoneType |
6,967 | growthbook | to_dict | null | def to_dict(self) -> dict:
data = {
"value": self.value,
"source": self.source,
"on": self.on,
"off": self.off,
}
if self.ruleId:
data["ruleId"] = self.ruleId
if self.experiment:
data["experiment"] = self.experiment.to_dict()
if self.experimentResult:
data["experimentResult"] = self.experimentResult.to_dict()
return data
| (self) -> dict |
6,968 | growthbook | FeatureRule | null | class FeatureRule(object):
def __init__(
self,
id: str = None,
key: str = "",
variations: list = None,
weights: List[float] = None,
coverage: int = None,
condition: dict = None,
namespace: Tuple[str, float, float] = None,
force=None,
hashAttribute: str = "id",
fallbackAttribute: str = None,
hashVersion: int = None,
range: Tuple[float, float] = None,
ranges: List[Tuple[float, float]] = None,
meta: List[VariationMeta] = None,
filters: List[Filter] = None,
seed: str = None,
name: str = None,
phase: str = None,
disableStickyBucketing: bool = False,
bucketVersion: int = None,
minBucketVersion: int = None,
parentConditions: List[dict] = None,
) -> None:
if disableStickyBucketing:
fallbackAttribute = None
self.id = id
self.key = key
self.variations = variations
self.weights = weights
self.coverage = coverage
self.condition = condition
self.namespace = namespace
self.force = force
self.hashAttribute = hashAttribute
self.fallbackAttribute = fallbackAttribute
self.hashVersion = hashVersion or 1
self.range = range
self.ranges = ranges
self.meta = meta
self.filters = filters
self.seed = seed
self.name = name
self.phase = phase
self.disableStickyBucketing = disableStickyBucketing
self.bucketVersion = bucketVersion or 0
self.minBucketVersion = minBucketVersion or 0
self.parentConditions = parentConditions
def to_dict(self) -> dict:
data: Dict[str, Any] = {}
if self.id:
data["id"] = self.id
if self.key:
data["key"] = self.key
if self.variations is not None:
data["variations"] = self.variations
if self.weights is not None:
data["weights"] = self.weights
if self.coverage and self.coverage != 1:
data["coverage"] = self.coverage
if self.condition is not None:
data["condition"] = self.condition
if self.namespace is not None:
data["namespace"] = self.namespace
if self.force is not None:
data["force"] = self.force
if self.hashAttribute != "id":
data["hashAttribute"] = self.hashAttribute
if self.hashVersion:
data["hashVersion"] = self.hashVersion
if self.range is not None:
data["range"] = self.range
if self.ranges is not None:
data["ranges"] = self.ranges
if self.meta is not None:
data["meta"] = self.meta
if self.filters is not None:
data["filters"] = self.filters
if self.seed is not None:
data["seed"] = self.seed
if self.name is not None:
data["name"] = self.name
if self.phase is not None:
data["phase"] = self.phase
if self.fallbackAttribute:
data["fallbackAttribute"] = self.fallbackAttribute
if self.disableStickyBucketing:
data["disableStickyBucketing"] = True
if self.bucketVersion:
data["bucketVersion"] = self.bucketVersion
if self.minBucketVersion:
data["minBucketVersion"] = self.minBucketVersion
if self.parentConditions:
data["parentConditions"] = self.parentConditions
return data
| (id: str = None, key: str = '', variations: list = None, weights: List[float] = None, coverage: int = None, condition: dict = None, namespace: Tuple[str, float, float] = None, force=None, hashAttribute: str = 'id', fallbackAttribute: str = None, hashVersion: int = None, range: Tuple[float, float] = None, ranges: List[Tuple[float, float]] = None, meta: List[growthbook.VariationMeta] = None, filters: List[growthbook.Filter] = None, seed: str = None, name: str = None, phase: str = None, disableStickyBucketing: bool = False, bucketVersion: int = None, minBucketVersion: int = None, parentConditions: List[dict] = None) -> None |
6,969 | growthbook | __init__ | null | def __init__(
self,
id: str = None,
key: str = "",
variations: list = None,
weights: List[float] = None,
coverage: int = None,
condition: dict = None,
namespace: Tuple[str, float, float] = None,
force=None,
hashAttribute: str = "id",
fallbackAttribute: str = None,
hashVersion: int = None,
range: Tuple[float, float] = None,
ranges: List[Tuple[float, float]] = None,
meta: List[VariationMeta] = None,
filters: List[Filter] = None,
seed: str = None,
name: str = None,
phase: str = None,
disableStickyBucketing: bool = False,
bucketVersion: int = None,
minBucketVersion: int = None,
parentConditions: List[dict] = None,
) -> None:
if disableStickyBucketing:
fallbackAttribute = None
self.id = id
self.key = key
self.variations = variations
self.weights = weights
self.coverage = coverage
self.condition = condition
self.namespace = namespace
self.force = force
self.hashAttribute = hashAttribute
self.fallbackAttribute = fallbackAttribute
self.hashVersion = hashVersion or 1
self.range = range
self.ranges = ranges
self.meta = meta
self.filters = filters
self.seed = seed
self.name = name
self.phase = phase
self.disableStickyBucketing = disableStickyBucketing
self.bucketVersion = bucketVersion or 0
self.minBucketVersion = minBucketVersion or 0
self.parentConditions = parentConditions
| (self, id: Optional[str] = None, key: str = '', variations: Optional[list] = None, weights: Optional[List[float]] = None, coverage: Optional[int] = None, condition: Optional[dict] = None, namespace: Optional[Tuple[str, float, float]] = None, force=None, hashAttribute: str = 'id', fallbackAttribute: Optional[str] = None, hashVersion: Optional[int] = None, range: Optional[Tuple[float, float]] = None, ranges: Optional[List[Tuple[float, float]]] = None, meta: Optional[List[growthbook.VariationMeta]] = None, filters: Optional[List[growthbook.Filter]] = None, seed: Optional[str] = None, name: Optional[str] = None, phase: Optional[str] = None, disableStickyBucketing: bool = False, bucketVersion: Optional[int] = None, minBucketVersion: Optional[int] = None, parentConditions: Optional[List[dict]] = None) -> NoneType |
6,970 | growthbook | to_dict | null | def to_dict(self) -> dict:
data: Dict[str, Any] = {}
if self.id:
data["id"] = self.id
if self.key:
data["key"] = self.key
if self.variations is not None:
data["variations"] = self.variations
if self.weights is not None:
data["weights"] = self.weights
if self.coverage and self.coverage != 1:
data["coverage"] = self.coverage
if self.condition is not None:
data["condition"] = self.condition
if self.namespace is not None:
data["namespace"] = self.namespace
if self.force is not None:
data["force"] = self.force
if self.hashAttribute != "id":
data["hashAttribute"] = self.hashAttribute
if self.hashVersion:
data["hashVersion"] = self.hashVersion
if self.range is not None:
data["range"] = self.range
if self.ranges is not None:
data["ranges"] = self.ranges
if self.meta is not None:
data["meta"] = self.meta
if self.filters is not None:
data["filters"] = self.filters
if self.seed is not None:
data["seed"] = self.seed
if self.name is not None:
data["name"] = self.name
if self.phase is not None:
data["phase"] = self.phase
if self.fallbackAttribute:
data["fallbackAttribute"] = self.fallbackAttribute
if self.disableStickyBucketing:
data["disableStickyBucketing"] = True
if self.bucketVersion:
data["bucketVersion"] = self.bucketVersion
if self.minBucketVersion:
data["minBucketVersion"] = self.minBucketVersion
if self.parentConditions:
data["parentConditions"] = self.parentConditions
return data
| (self) -> dict |
6,971 | growthbook | Filter | null | class Filter(TypedDict):
seed: str
ranges: List[Tuple[float, float]]
hashVersion: int
attribute: str
| null |
6,972 | growthbook | GrowthBook | null | class GrowthBook(object):
def __init__(
self,
enabled: bool = True,
attributes: dict = {},
url: str = "",
features: dict = {},
qa_mode: bool = False,
on_experiment_viewed=None,
api_host: str = "",
client_key: str = "",
decryption_key: str = "",
cache_ttl: int = 60,
forced_variations: dict = {},
sticky_bucket_service: AbstractStickyBucketService = None,
sticky_bucket_identifier_attributes: List[str] = None,
# Deprecated args
trackingCallback=None,
qaMode: bool = False,
user: dict = {},
groups: dict = {},
overrides: dict = {},
forcedVariations: dict = {},
):
self._enabled = enabled
self._attributes = attributes
self._url = url
self._features: Dict[str, Feature] = {}
self._api_host = api_host
self._client_key = client_key
self._decryption_key = decryption_key
self._cache_ttl = cache_ttl
self.sticky_bucket_identifier_attributes = sticky_bucket_identifier_attributes
self.sticky_bucket_service = sticky_bucket_service
self._sticky_bucket_assignment_docs: dict = {}
self._using_derived_sticky_bucket_attributes = not sticky_bucket_identifier_attributes
self._sticky_bucket_attributes: Optional[dict] = None
self._qaMode = qa_mode or qaMode
self._trackingCallback = on_experiment_viewed or trackingCallback
# Deprecated args
self._user = user
self._groups = groups
self._overrides = overrides
self._forcedVariations = forced_variations or forcedVariations
self._tracked: Dict[str, Any] = {}
self._assigned: Dict[str, Any] = {}
self._subscriptions: Set[Any] = set()
if features:
self.setFeatures(features)
def load_features(self) -> None:
if not self._client_key:
raise ValueError("Must specify `client_key` to refresh features")
features = feature_repo.load_features(
self._api_host, self._client_key, self._decryption_key, self._cache_ttl
)
if features is not None:
self.setFeatures(features)
# @deprecated, use set_features
def setFeatures(self, features: dict) -> None:
return self.set_features(features)
def set_features(self, features: dict) -> None:
self._features = {}
for key, feature in features.items():
if isinstance(feature, Feature):
self._features[key] = feature
else:
self._features[key] = Feature(
rules=feature.get("rules", []),
defaultValue=feature.get("defaultValue", None),
)
self.refresh_sticky_buckets()
# @deprecated, use get_features
def getFeatures(self) -> Dict[str, Feature]:
return self.get_features()
def get_features(self) -> Dict[str, Feature]:
return self._features
# @deprecated, use set_attributes
def setAttributes(self, attributes: dict) -> None:
return self.set_attributes(attributes)
def set_attributes(self, attributes: dict) -> None:
self._attributes = attributes
self.refresh_sticky_buckets()
# @deprecated, use get_attributes
def getAttributes(self) -> dict:
return self.get_attributes()
def get_attributes(self) -> dict:
return self._attributes
def destroy(self) -> None:
self._subscriptions.clear()
self._tracked.clear()
self._assigned.clear()
self._trackingCallback = None
self._forcedVariations.clear()
self._overrides.clear()
self._groups.clear()
self._attributes.clear()
self._features.clear()
# @deprecated, use is_on
def isOn(self, key: str) -> bool:
return self.is_on(key)
def is_on(self, key: str) -> bool:
return self.evalFeature(key).on
# @deprecated, use is_off
def isOff(self, key: str) -> bool:
return self.is_off(key)
def is_off(self, key: str) -> bool:
return self.evalFeature(key).off
# @deprecated, use get_feature_value
def getFeatureValue(self, key: str, fallback):
return self.get_feature_value(key, fallback)
def get_feature_value(self, key: str, fallback):
res = self.evalFeature(key)
return res.value if res.value is not None else fallback
# @deprecated, use eval_feature
def evalFeature(self, key: str) -> FeatureResult:
return self.eval_feature(key)
def eval_prereqs(self, parentConditions: List[dict], stack: Set[str]) -> str:
for parentCondition in parentConditions:
parentRes = self._eval_feature(parentCondition.get("id", None), stack)
if parentRes.source == "cyclicPrerequisite":
return "cyclic"
if not evalCondition({'value': parentRes.value}, parentCondition.get("condition", None)):
if parentCondition.get("gate", False):
return "gate"
return "fail"
return "pass"
def eval_feature(self, key: str) -> FeatureResult:
return self._eval_feature(key, set())
def _eval_feature(self, key: str, stack: Set[str]) -> FeatureResult:
logger.debug("Evaluating feature %s", key)
if key not in self._features:
logger.warning("Unknown feature %s", key)
return FeatureResult(None, "unknownFeature")
if key in stack:
logger.warning("Cyclic prerequisite detected, stack: %s", stack)
return FeatureResult(None, "cyclicPrerequisite")
stack.add(key)
feature = self._features[key]
for rule in feature.rules:
logger.debug("Evaluating feature %s, rule %s", key, rule.to_dict())
if (rule.parentConditions):
prereq_res = self.eval_prereqs(rule.parentConditions, stack)
if prereq_res == "gate":
logger.debug("Top-level prerequisite failed, return None, feature %s", key)
return FeatureResult(None, "prerequisite")
if prereq_res == "cyclic":
# Warning already logged in this case
return FeatureResult(None, "cyclicPrerequisite")
if prereq_res == "fail":
logger.debug("Skip rule because of failing prerequisite, feature %s", key)
continue
if rule.condition:
if not evalCondition(self._attributes, rule.condition):
logger.debug(
"Skip rule because of failed condition, feature %s", key
)
continue
if rule.filters:
if self._isFilteredOut(rule.filters):
logger.debug(
"Skip rule because of filters/namespaces, feature %s", key
)
continue
if rule.force is not None:
if not self._isIncludedInRollout(
rule.seed or key,
rule.hashAttribute,
rule.fallbackAttribute,
rule.range,
rule.coverage,
rule.hashVersion,
):
logger.debug(
"Skip rule because user not included in percentage rollout, feature %s",
key,
)
continue
logger.debug("Force value from rule, feature %s", key)
return FeatureResult(rule.force, "force", ruleId=rule.id)
if rule.variations is None:
logger.warning("Skip invalid rule, feature %s", key)
continue
exp = Experiment(
key=rule.key or key,
variations=rule.variations,
coverage=rule.coverage,
weights=rule.weights,
hashAttribute=rule.hashAttribute,
fallbackAttribute=rule.fallbackAttribute,
namespace=rule.namespace,
hashVersion=rule.hashVersion,
meta=rule.meta,
ranges=rule.ranges,
name=rule.name,
phase=rule.phase,
seed=rule.seed,
filters=rule.filters,
condition=rule.condition,
disableStickyBucketing=rule.disableStickyBucketing,
bucketVersion=rule.bucketVersion,
minBucketVersion=rule.minBucketVersion,
)
result = self._run(exp, key)
self._fireSubscriptions(exp, result)
if not result.inExperiment:
logger.debug(
"Skip rule because user not included in experiment, feature %s", key
)
continue
if result.passthrough:
logger.debug("Continue to next rule, feature %s", key)
continue
logger.debug("Assign value from experiment, feature %s", key)
return FeatureResult(
result.value, "experiment", exp, result, ruleId=rule.id
)
logger.debug("Use default value for feature %s", key)
return FeatureResult(feature.defaultValue, "defaultValue")
# @deprecated, use get_all_results
def getAllResults(self):
return self.get_all_results()
def get_all_results(self):
return self._assigned.copy()
def _getOrigHashValue(self, attr: str = None, fallbackAttr: str = None) -> Tuple[str, str]:
attr = attr or "id"
val = ""
if attr in self._attributes:
val = self._attributes[attr] or ""
elif attr in self._user:
val = self._user[attr] or ""
# If no match, try fallback
if (not val or val == "") and fallbackAttr and self.sticky_bucket_service:
if fallbackAttr in self._attributes:
val = self._attributes[fallbackAttr] or ""
elif fallbackAttr in self._user:
val = self._user[fallbackAttr] or ""
if not val or val != "":
attr = fallbackAttr
return (attr, val)
def _getHashValue(self, attr: str = None, fallbackAttr: str = None) -> Tuple[str, str]:
(attr, val) = self._getOrigHashValue(attr, fallbackAttr)
return (attr, str(val))
def _isIncludedInRollout(
self,
seed: str,
hashAttribute: str = None,
fallbackAttribute: str = None,
range: Tuple[float, float] = None,
coverage: float = None,
hashVersion: int = None,
) -> bool:
if coverage is None and range is None:
return True
(_, hash_value) = self._getHashValue(hashAttribute, fallbackAttribute)
if hash_value == "":
return False
n = gbhash(seed, hash_value, hashVersion or 1)
if n is None:
return False
if range:
return inRange(n, range)
elif coverage is not None:
return n <= coverage
return True
def _isFilteredOut(self, filters: List[Filter]) -> bool:
for filter in filters:
(_, hash_value) = self._getHashValue(filter.get("attribute", "id"))
if hash_value == "":
return False
n = gbhash(filter.get("seed", ""), hash_value, filter.get("hashVersion", 2))
if n is None:
return False
filtered = False
for range in filter["ranges"]:
if inRange(n, range):
filtered = True
break
if not filtered:
return True
return False
def _fireSubscriptions(self, experiment: Experiment, result: Result):
prev = self._assigned.get(experiment.key, None)
if (
not prev
or prev["result"].inExperiment != result.inExperiment
or prev["result"].variationId != result.variationId
):
self._assigned[experiment.key] = {
"experiment": experiment,
"result": result,
}
for cb in self._subscriptions:
try:
cb(experiment, result)
except Exception:
pass
def run(self, experiment: Experiment) -> Result:
result = self._run(experiment)
self._fireSubscriptions(experiment, result)
return result
def subscribe(self, callback):
self._subscriptions.add(callback)
return lambda: self._subscriptions.remove(callback)
def _run(self, experiment: Experiment, featureId: Optional[str] = None) -> Result:
# 1. If experiment has less than 2 variations, return immediately
if len(experiment.variations) < 2:
logger.warning(
"Experiment %s has less than 2 variations, skip", experiment.key
)
return self._getExperimentResult(experiment, featureId=featureId)
# 2. If growthbook is disabled, return immediately
if not self._enabled:
logger.debug(
"Skip experiment %s because GrowthBook is disabled", experiment.key
)
return self._getExperimentResult(experiment, featureId=featureId)
# 2.5. If the experiment props have been overridden, merge them in
if self._overrides.get(experiment.key, None):
experiment.update(self._overrides[experiment.key])
# 3. If experiment is forced via a querystring in the url
qs = getQueryStringOverride(
experiment.key, self._url, len(experiment.variations)
)
if qs is not None:
logger.debug(
"Force variation %d from URL querystring, experiment %s",
qs,
experiment.key,
)
return self._getExperimentResult(experiment, qs, featureId=featureId)
# 4. If variation is forced in the context
if self._forcedVariations.get(experiment.key, None) is not None:
logger.debug(
"Force variation %d from GrowthBook context, experiment %s",
self._forcedVariations[experiment.key],
experiment.key,
)
return self._getExperimentResult(
experiment, self._forcedVariations[experiment.key], featureId=featureId
)
# 5. If experiment is a draft or not active, return immediately
if experiment.status == "draft" or not experiment.active:
logger.debug("Experiment %s is not active, skip", experiment.key)
return self._getExperimentResult(experiment, featureId=featureId)
# 6. Get the user hash attribute and value
(hashAttribute, hashValue) = self._getHashValue(experiment.hashAttribute, experiment.fallbackAttribute)
if not hashValue:
logger.debug(
"Skip experiment %s because user's hashAttribute value is empty",
experiment.key,
)
return self._getExperimentResult(experiment, featureId=featureId)
assigned = -1
found_sticky_bucket = False
sticky_bucket_version_is_blocked = False
if self.sticky_bucket_service and not experiment.disableStickyBucketing:
sticky_bucket = self._get_sticky_bucket_variation(
experiment.key,
experiment.bucketVersion,
experiment.minBucketVersion,
experiment.meta,
hash_attribute=experiment.hashAttribute,
fallback_attribute=experiment.fallbackAttribute,
)
found_sticky_bucket = sticky_bucket.get('variation', 0) >= 0
assigned = sticky_bucket.get('variation', 0)
sticky_bucket_version_is_blocked = sticky_bucket.get('versionIsBlocked', False)
if found_sticky_bucket:
logger.debug("Found sticky bucket for experiment %s, assigning sticky variation %s", experiment.key, assigned)
# Some checks are not needed if we already have a sticky bucket
if not found_sticky_bucket:
# 7. Filtered out / not in namespace
if experiment.filters:
if self._isFilteredOut(experiment.filters):
logger.debug(
"Skip experiment %s because of filters/namespaces", experiment.key
)
return self._getExperimentResult(experiment, featureId=featureId)
elif experiment.namespace and not inNamespace(hashValue, experiment.namespace):
logger.debug("Skip experiment %s because of namespace", experiment.key)
return self._getExperimentResult(experiment, featureId=featureId)
# 7.5. If experiment has an include property
if experiment.include:
try:
if not experiment.include():
logger.debug(
"Skip experiment %s because include() returned false",
experiment.key,
)
return self._getExperimentResult(experiment, featureId=featureId)
except Exception:
logger.warning(
"Skip experiment %s because include() raised an Exception",
experiment.key,
)
return self._getExperimentResult(experiment, featureId=featureId)
# 8. Exclude if condition is false
if experiment.condition and not evalCondition(
self._attributes, experiment.condition
):
logger.debug(
"Skip experiment %s because user failed the condition", experiment.key
)
return self._getExperimentResult(experiment, featureId=featureId)
# 8.05 Exclude if parent conditions are not met
if (experiment.parentConditions):
prereq_res = self.eval_prereqs(experiment.parentConditions, set())
if prereq_res == "gate" or prereq_res == "fail":
logger.debug("Skip experiment %s because of failing prerequisite", experiment.key)
return self._getExperimentResult(experiment, featureId=featureId)
if prereq_res == "cyclic":
logger.debug("Skip experiment %s because of cyclic prerequisite", experiment.key)
return self._getExperimentResult(experiment, featureId=featureId)
# 8.1. Make sure user is in a matching group
if experiment.groups and len(experiment.groups):
expGroups = self._groups or {}
matched = False
for group in experiment.groups:
if expGroups[group]:
matched = True
if not matched:
logger.debug(
"Skip experiment %s because user not in required group",
experiment.key,
)
return self._getExperimentResult(experiment, featureId=featureId)
# The following apply even when in a sticky bucket
# 8.2. If experiment.url is set, see if it's valid
if experiment.url:
if not self._urlIsValid(experiment.url):
logger.debug(
"Skip experiment %s because current URL is not targeted",
experiment.key,
)
return self._getExperimentResult(experiment, featureId=featureId)
# 9. Get bucket ranges and choose variation
n = gbhash(
experiment.seed or experiment.key, hashValue, experiment.hashVersion or 1
)
if n is None:
logger.warning(
"Skip experiment %s because of invalid hashVersion", experiment.key
)
return self._getExperimentResult(experiment, featureId=featureId)
if not found_sticky_bucket:
c = experiment.coverage
ranges = experiment.ranges or getBucketRanges(
len(experiment.variations), c if c is not None else 1, experiment.weights
)
assigned = chooseVariation(n, ranges)
# Unenroll if any prior sticky buckets are blocked by version
if sticky_bucket_version_is_blocked:
logger.debug("Skip experiment %s because sticky bucket version is blocked", experiment.key)
return self._getExperimentResult(experiment, featureId=featureId, stickyBucketUsed=True)
# 10. Return if not in experiment
if assigned < 0:
logger.debug(
"Skip experiment %s because user is not included in the rollout",
experiment.key,
)
return self._getExperimentResult(experiment, featureId=featureId)
# 11. If experiment is forced, return immediately
if experiment.force is not None:
logger.debug(
"Force variation %d in experiment %s", experiment.force, experiment.key
)
return self._getExperimentResult(
experiment, experiment.force, featureId=featureId
)
# 12. Exclude if in QA mode
if self._qaMode:
logger.debug("Skip experiment %s because of QA Mode", experiment.key)
return self._getExperimentResult(experiment, featureId=featureId)
# 12.5. If experiment is stopped, return immediately
if experiment.status == "stopped":
logger.debug("Skip experiment %s because it is stopped", experiment.key)
return self._getExperimentResult(experiment, featureId=featureId)
# 13. Build the result object
result = self._getExperimentResult(
experiment, assigned, True, featureId=featureId, bucket=n, stickyBucketUsed=found_sticky_bucket
)
# 13.5 Persist sticky bucket
if self.sticky_bucket_service and not experiment.disableStickyBucketing:
assignment = {}
assignment[self._get_sticky_bucket_experiment_key(
experiment.key,
experiment.bucketVersion
)] = result.key
data = self._generate_sticky_bucket_assignment_doc(
hashAttribute,
hashValue,
assignment
)
doc = data.get("doc", None)
if doc and data.get('changed', False):
if not self._sticky_bucket_assignment_docs:
self._sticky_bucket_assignment_docs = {}
self._sticky_bucket_assignment_docs[data.get('key')] = doc
self.sticky_bucket_service.save_assignments(doc)
# 14. Fire the tracking callback if set
self._track(experiment, result)
# 15. Return the result
logger.debug("Assigned variation %d in experiment %s", assigned, experiment.key)
return result
def _track(self, experiment: Experiment, result: Result) -> None:
if not self._trackingCallback:
return None
key = (
result.hashAttribute
+ str(result.hashValue)
+ experiment.key
+ str(result.variationId)
)
if not self._tracked.get(key):
try:
self._trackingCallback(experiment=experiment, result=result)
self._tracked[key] = True
except Exception:
pass
def _urlIsValid(self, pattern) -> bool:
if not self._url:
return False
try:
r = re.compile(pattern)
if r.search(self._url):
return True
pathOnly = re.sub(r"^[^/]*/", "/", re.sub(r"^https?:\/\/", "", self._url))
if r.search(pathOnly):
return True
return False
except Exception:
return True
def _getExperimentResult(
self,
experiment: Experiment,
variationId: int = -1,
hashUsed: bool = False,
featureId: str = None,
bucket: float = None,
stickyBucketUsed: bool = False
) -> Result:
inExperiment = True
if variationId < 0 or variationId > len(experiment.variations) - 1:
variationId = 0
inExperiment = False
meta = None
if experiment.meta:
meta = experiment.meta[variationId]
(hashAttribute, hashValue) = self._getOrigHashValue(experiment.hashAttribute, experiment.fallbackAttribute)
return Result(
featureId=featureId,
inExperiment=inExperiment,
variationId=variationId,
value=experiment.variations[variationId],
hashUsed=hashUsed,
hashAttribute=hashAttribute,
hashValue=hashValue,
meta=meta,
bucket=bucket,
stickyBucketUsed=stickyBucketUsed
)
def _derive_sticky_bucket_identifier_attributes(self) -> List[str]:
attributes = set()
for key, feature in self._features.items():
for rule in feature.rules:
if rule.variations:
attributes.add(rule.hashAttribute or "id")
if rule.fallbackAttribute:
attributes.add(rule.fallbackAttribute)
return list(attributes)
def _get_sticky_bucket_attributes(self) -> dict:
attributes: Dict[str, str] = {}
if self._using_derived_sticky_bucket_attributes:
self.sticky_bucket_identifier_attributes = self._derive_sticky_bucket_identifier_attributes()
if not self.sticky_bucket_identifier_attributes:
return attributes
for attr in self.sticky_bucket_identifier_attributes:
_, hash_value = self._getHashValue(attr)
if hash_value:
attributes[attr] = hash_value
return attributes
def _get_sticky_bucket_assignments(self, attr: str = None, fallback: str = None) -> Dict[str, str]:
merged: Dict[str, str] = {}
_, hashValue = self._getHashValue(attr)
key = f"{attr}||{hashValue}"
if key in self._sticky_bucket_assignment_docs:
merged = self._sticky_bucket_assignment_docs[key].get("assignments", {})
if fallback:
_, hashValue = self._getHashValue(fallback)
key = f"{fallback}||{hashValue}"
if key in self._sticky_bucket_assignment_docs:
# Merge the fallback assignments, but don't overwrite existing ones
for k, v in self._sticky_bucket_assignment_docs[key].get("assignments", {}).items():
if k not in merged:
merged[k] = v
return merged
def _is_blocked(
self,
assignments: Dict[str, str],
experiment_key: str,
min_bucket_version: int
) -> bool:
if min_bucket_version > 0:
for i in range(min_bucket_version):
blocked_key = self._get_sticky_bucket_experiment_key(experiment_key, i)
if blocked_key in assignments:
return True
return False
def _get_sticky_bucket_variation(
self,
experiment_key: str,
bucket_version: int = None,
min_bucket_version: int = None,
meta: List[VariationMeta] = None,
hash_attribute: str = None,
fallback_attribute: str = None
) -> dict:
bucket_version = bucket_version or 0
min_bucket_version = min_bucket_version or 0
meta = meta or []
id = self._get_sticky_bucket_experiment_key(experiment_key, bucket_version)
assignments = self._get_sticky_bucket_assignments(hash_attribute, fallback_attribute)
if self._is_blocked(assignments, experiment_key, min_bucket_version):
return {
'variation': -1,
'versionIsBlocked': True
}
variation_key = assignments.get(id, None)
if not variation_key:
return {
'variation': -1
}
# Find the key in meta
variation = next((i for i, v in enumerate(meta) if v.get("key") == variation_key), -1)
if variation < 0:
return {
'variation': -1
}
return {'variation': variation}
def _get_sticky_bucket_experiment_key(self, experiment_key: str, bucket_version: int = 0) -> str:
return experiment_key + "__" + str(bucket_version)
def refresh_sticky_buckets(self, force: bool = False) -> None:
if not self.sticky_bucket_service:
return
attributes = self._get_sticky_bucket_attributes()
if not force and attributes == self._sticky_bucket_attributes:
logger.debug("Skipping refresh of sticky bucket assignments, no changes")
return
self._sticky_bucket_attributes = attributes
self._sticky_bucket_assignment_docs = self.sticky_bucket_service.get_all_assignments(attributes)
def _generate_sticky_bucket_assignment_doc(self, attribute_name: str, attribute_value: str, assignments: dict):
key = attribute_name + "||" + attribute_value
existing_assignments = self._sticky_bucket_assignment_docs.get(key, {}).get("assignments", {})
new_assignments = {**existing_assignments, **assignments}
# Compare JSON strings to see if they have changed
existing_json = json.dumps(existing_assignments, sort_keys=True)
new_json = json.dumps(new_assignments, sort_keys=True)
changed = existing_json != new_json
return {
'key': key,
'doc': {
'attributeName': attribute_name,
'attributeValue': attribute_value,
'assignments': new_assignments
},
'changed': changed
}
| (enabled: bool = True, attributes: dict = {}, url: str = '', features: dict = {}, qa_mode: bool = False, on_experiment_viewed=None, api_host: str = '', client_key: str = '', decryption_key: str = '', cache_ttl: int = 60, forced_variations: dict = {}, sticky_bucket_service: growthbook.AbstractStickyBucketService = None, sticky_bucket_identifier_attributes: List[str] = None, trackingCallback=None, qaMode: bool = False, user: dict = {}, groups: dict = {}, overrides: dict = {}, forcedVariations: dict = {}) |
6,973 | growthbook | __init__ | null | def __init__(
self,
enabled: bool = True,
attributes: dict = {},
url: str = "",
features: dict = {},
qa_mode: bool = False,
on_experiment_viewed=None,
api_host: str = "",
client_key: str = "",
decryption_key: str = "",
cache_ttl: int = 60,
forced_variations: dict = {},
sticky_bucket_service: AbstractStickyBucketService = None,
sticky_bucket_identifier_attributes: List[str] = None,
# Deprecated args
trackingCallback=None,
qaMode: bool = False,
user: dict = {},
groups: dict = {},
overrides: dict = {},
forcedVariations: dict = {},
):
self._enabled = enabled
self._attributes = attributes
self._url = url
self._features: Dict[str, Feature] = {}
self._api_host = api_host
self._client_key = client_key
self._decryption_key = decryption_key
self._cache_ttl = cache_ttl
self.sticky_bucket_identifier_attributes = sticky_bucket_identifier_attributes
self.sticky_bucket_service = sticky_bucket_service
self._sticky_bucket_assignment_docs: dict = {}
self._using_derived_sticky_bucket_attributes = not sticky_bucket_identifier_attributes
self._sticky_bucket_attributes: Optional[dict] = None
self._qaMode = qa_mode or qaMode
self._trackingCallback = on_experiment_viewed or trackingCallback
# Deprecated args
self._user = user
self._groups = groups
self._overrides = overrides
self._forcedVariations = forced_variations or forcedVariations
self._tracked: Dict[str, Any] = {}
self._assigned: Dict[str, Any] = {}
self._subscriptions: Set[Any] = set()
if features:
self.setFeatures(features)
| (self, enabled: bool = True, attributes: dict = {}, url: str = '', features: dict = {}, qa_mode: bool = False, on_experiment_viewed=None, api_host: str = '', client_key: str = '', decryption_key: str = '', cache_ttl: int = 60, forced_variations: dict = {}, sticky_bucket_service: Optional[growthbook.AbstractStickyBucketService] = None, sticky_bucket_identifier_attributes: Optional[List[str]] = None, trackingCallback=None, qaMode: bool = False, user: dict = {}, groups: dict = {}, overrides: dict = {}, forcedVariations: dict = {}) |
6,974 | growthbook | _derive_sticky_bucket_identifier_attributes | null | def _derive_sticky_bucket_identifier_attributes(self) -> List[str]:
attributes = set()
for key, feature in self._features.items():
for rule in feature.rules:
if rule.variations:
attributes.add(rule.hashAttribute or "id")
if rule.fallbackAttribute:
attributes.add(rule.fallbackAttribute)
return list(attributes)
| (self) -> List[str] |
6,975 | growthbook | _eval_feature | null | def _eval_feature(self, key: str, stack: Set[str]) -> FeatureResult:
logger.debug("Evaluating feature %s", key)
if key not in self._features:
logger.warning("Unknown feature %s", key)
return FeatureResult(None, "unknownFeature")
if key in stack:
logger.warning("Cyclic prerequisite detected, stack: %s", stack)
return FeatureResult(None, "cyclicPrerequisite")
stack.add(key)
feature = self._features[key]
for rule in feature.rules:
logger.debug("Evaluating feature %s, rule %s", key, rule.to_dict())
if (rule.parentConditions):
prereq_res = self.eval_prereqs(rule.parentConditions, stack)
if prereq_res == "gate":
logger.debug("Top-level prerequisite failed, return None, feature %s", key)
return FeatureResult(None, "prerequisite")
if prereq_res == "cyclic":
# Warning already logged in this case
return FeatureResult(None, "cyclicPrerequisite")
if prereq_res == "fail":
logger.debug("Skip rule because of failing prerequisite, feature %s", key)
continue
if rule.condition:
if not evalCondition(self._attributes, rule.condition):
logger.debug(
"Skip rule because of failed condition, feature %s", key
)
continue
if rule.filters:
if self._isFilteredOut(rule.filters):
logger.debug(
"Skip rule because of filters/namespaces, feature %s", key
)
continue
if rule.force is not None:
if not self._isIncludedInRollout(
rule.seed or key,
rule.hashAttribute,
rule.fallbackAttribute,
rule.range,
rule.coverage,
rule.hashVersion,
):
logger.debug(
"Skip rule because user not included in percentage rollout, feature %s",
key,
)
continue
logger.debug("Force value from rule, feature %s", key)
return FeatureResult(rule.force, "force", ruleId=rule.id)
if rule.variations is None:
logger.warning("Skip invalid rule, feature %s", key)
continue
exp = Experiment(
key=rule.key or key,
variations=rule.variations,
coverage=rule.coverage,
weights=rule.weights,
hashAttribute=rule.hashAttribute,
fallbackAttribute=rule.fallbackAttribute,
namespace=rule.namespace,
hashVersion=rule.hashVersion,
meta=rule.meta,
ranges=rule.ranges,
name=rule.name,
phase=rule.phase,
seed=rule.seed,
filters=rule.filters,
condition=rule.condition,
disableStickyBucketing=rule.disableStickyBucketing,
bucketVersion=rule.bucketVersion,
minBucketVersion=rule.minBucketVersion,
)
result = self._run(exp, key)
self._fireSubscriptions(exp, result)
if not result.inExperiment:
logger.debug(
"Skip rule because user not included in experiment, feature %s", key
)
continue
if result.passthrough:
logger.debug("Continue to next rule, feature %s", key)
continue
logger.debug("Assign value from experiment, feature %s", key)
return FeatureResult(
result.value, "experiment", exp, result, ruleId=rule.id
)
logger.debug("Use default value for feature %s", key)
return FeatureResult(feature.defaultValue, "defaultValue")
| (self, key: str, stack: Set[str]) -> growthbook.FeatureResult |
6,976 | growthbook | _fireSubscriptions | null | def _fireSubscriptions(self, experiment: Experiment, result: Result):
prev = self._assigned.get(experiment.key, None)
if (
not prev
or prev["result"].inExperiment != result.inExperiment
or prev["result"].variationId != result.variationId
):
self._assigned[experiment.key] = {
"experiment": experiment,
"result": result,
}
for cb in self._subscriptions:
try:
cb(experiment, result)
except Exception:
pass
| (self, experiment: growthbook.Experiment, result: growthbook.Result) |
6,977 | growthbook | _generate_sticky_bucket_assignment_doc | null | def _generate_sticky_bucket_assignment_doc(self, attribute_name: str, attribute_value: str, assignments: dict):
key = attribute_name + "||" + attribute_value
existing_assignments = self._sticky_bucket_assignment_docs.get(key, {}).get("assignments", {})
new_assignments = {**existing_assignments, **assignments}
# Compare JSON strings to see if they have changed
existing_json = json.dumps(existing_assignments, sort_keys=True)
new_json = json.dumps(new_assignments, sort_keys=True)
changed = existing_json != new_json
return {
'key': key,
'doc': {
'attributeName': attribute_name,
'attributeValue': attribute_value,
'assignments': new_assignments
},
'changed': changed
}
| (self, attribute_name: str, attribute_value: str, assignments: dict) |
6,978 | growthbook | _getExperimentResult | null | def _getExperimentResult(
self,
experiment: Experiment,
variationId: int = -1,
hashUsed: bool = False,
featureId: str = None,
bucket: float = None,
stickyBucketUsed: bool = False
) -> Result:
inExperiment = True
if variationId < 0 or variationId > len(experiment.variations) - 1:
variationId = 0
inExperiment = False
meta = None
if experiment.meta:
meta = experiment.meta[variationId]
(hashAttribute, hashValue) = self._getOrigHashValue(experiment.hashAttribute, experiment.fallbackAttribute)
return Result(
featureId=featureId,
inExperiment=inExperiment,
variationId=variationId,
value=experiment.variations[variationId],
hashUsed=hashUsed,
hashAttribute=hashAttribute,
hashValue=hashValue,
meta=meta,
bucket=bucket,
stickyBucketUsed=stickyBucketUsed
)
| (self, experiment: growthbook.Experiment, variationId: int = -1, hashUsed: bool = False, featureId: Optional[str] = None, bucket: Optional[float] = None, stickyBucketUsed: bool = False) -> growthbook.Result |
6,979 | growthbook | _getHashValue | null | def _getHashValue(self, attr: str = None, fallbackAttr: str = None) -> Tuple[str, str]:
(attr, val) = self._getOrigHashValue(attr, fallbackAttr)
return (attr, str(val))
| (self, attr: Optional[str] = None, fallbackAttr: Optional[str] = None) -> Tuple[str, str] |
6,980 | growthbook | _getOrigHashValue | null | def _getOrigHashValue(self, attr: str = None, fallbackAttr: str = None) -> Tuple[str, str]:
attr = attr or "id"
val = ""
if attr in self._attributes:
val = self._attributes[attr] or ""
elif attr in self._user:
val = self._user[attr] or ""
# If no match, try fallback
if (not val or val == "") and fallbackAttr and self.sticky_bucket_service:
if fallbackAttr in self._attributes:
val = self._attributes[fallbackAttr] or ""
elif fallbackAttr in self._user:
val = self._user[fallbackAttr] or ""
if not val or val != "":
attr = fallbackAttr
return (attr, val)
| (self, attr: Optional[str] = None, fallbackAttr: Optional[str] = None) -> Tuple[str, str] |
6,981 | growthbook | _get_sticky_bucket_assignments | null | def _get_sticky_bucket_assignments(self, attr: str = None, fallback: str = None) -> Dict[str, str]:
merged: Dict[str, str] = {}
_, hashValue = self._getHashValue(attr)
key = f"{attr}||{hashValue}"
if key in self._sticky_bucket_assignment_docs:
merged = self._sticky_bucket_assignment_docs[key].get("assignments", {})
if fallback:
_, hashValue = self._getHashValue(fallback)
key = f"{fallback}||{hashValue}"
if key in self._sticky_bucket_assignment_docs:
# Merge the fallback assignments, but don't overwrite existing ones
for k, v in self._sticky_bucket_assignment_docs[key].get("assignments", {}).items():
if k not in merged:
merged[k] = v
return merged
| (self, attr: Optional[str] = None, fallback: Optional[str] = None) -> Dict[str, str] |
6,982 | growthbook | _get_sticky_bucket_attributes | null | def _get_sticky_bucket_attributes(self) -> dict:
attributes: Dict[str, str] = {}
if self._using_derived_sticky_bucket_attributes:
self.sticky_bucket_identifier_attributes = self._derive_sticky_bucket_identifier_attributes()
if not self.sticky_bucket_identifier_attributes:
return attributes
for attr in self.sticky_bucket_identifier_attributes:
_, hash_value = self._getHashValue(attr)
if hash_value:
attributes[attr] = hash_value
return attributes
| (self) -> dict |
6,983 | growthbook | _get_sticky_bucket_experiment_key | null | def _get_sticky_bucket_experiment_key(self, experiment_key: str, bucket_version: int = 0) -> str:
return experiment_key + "__" + str(bucket_version)
| (self, experiment_key: str, bucket_version: int = 0) -> str |
6,984 | growthbook | _get_sticky_bucket_variation | null | def _get_sticky_bucket_variation(
self,
experiment_key: str,
bucket_version: int = None,
min_bucket_version: int = None,
meta: List[VariationMeta] = None,
hash_attribute: str = None,
fallback_attribute: str = None
) -> dict:
bucket_version = bucket_version or 0
min_bucket_version = min_bucket_version or 0
meta = meta or []
id = self._get_sticky_bucket_experiment_key(experiment_key, bucket_version)
assignments = self._get_sticky_bucket_assignments(hash_attribute, fallback_attribute)
if self._is_blocked(assignments, experiment_key, min_bucket_version):
return {
'variation': -1,
'versionIsBlocked': True
}
variation_key = assignments.get(id, None)
if not variation_key:
return {
'variation': -1
}
# Find the key in meta
variation = next((i for i, v in enumerate(meta) if v.get("key") == variation_key), -1)
if variation < 0:
return {
'variation': -1
}
return {'variation': variation}
| (self, experiment_key: str, bucket_version: Optional[int] = None, min_bucket_version: Optional[int] = None, meta: Optional[List[growthbook.VariationMeta]] = None, hash_attribute: Optional[str] = None, fallback_attribute: Optional[str] = None) -> dict |
6,985 | growthbook | _isFilteredOut | null | def _isFilteredOut(self, filters: List[Filter]) -> bool:
for filter in filters:
(_, hash_value) = self._getHashValue(filter.get("attribute", "id"))
if hash_value == "":
return False
n = gbhash(filter.get("seed", ""), hash_value, filter.get("hashVersion", 2))
if n is None:
return False
filtered = False
for range in filter["ranges"]:
if inRange(n, range):
filtered = True
break
if not filtered:
return True
return False
| (self, filters: List[growthbook.Filter]) -> bool |
6,986 | growthbook | _isIncludedInRollout | null | def _isIncludedInRollout(
self,
seed: str,
hashAttribute: str = None,
fallbackAttribute: str = None,
range: Tuple[float, float] = None,
coverage: float = None,
hashVersion: int = None,
) -> bool:
if coverage is None and range is None:
return True
(_, hash_value) = self._getHashValue(hashAttribute, fallbackAttribute)
if hash_value == "":
return False
n = gbhash(seed, hash_value, hashVersion or 1)
if n is None:
return False
if range:
return inRange(n, range)
elif coverage is not None:
return n <= coverage
return True
| (self, seed: str, hashAttribute: Optional[str] = None, fallbackAttribute: Optional[str] = None, range: Optional[Tuple[float, float]] = None, coverage: Optional[float] = None, hashVersion: Optional[int] = None) -> bool |
6,987 | growthbook | _is_blocked | null | def _is_blocked(
self,
assignments: Dict[str, str],
experiment_key: str,
min_bucket_version: int
) -> bool:
if min_bucket_version > 0:
for i in range(min_bucket_version):
blocked_key = self._get_sticky_bucket_experiment_key(experiment_key, i)
if blocked_key in assignments:
return True
return False
| (self, assignments: Dict[str, str], experiment_key: str, min_bucket_version: int) -> bool |
6,988 | growthbook | _run | null | def _run(self, experiment: Experiment, featureId: Optional[str] = None) -> Result:
# 1. If experiment has less than 2 variations, return immediately
if len(experiment.variations) < 2:
logger.warning(
"Experiment %s has less than 2 variations, skip", experiment.key
)
return self._getExperimentResult(experiment, featureId=featureId)
# 2. If growthbook is disabled, return immediately
if not self._enabled:
logger.debug(
"Skip experiment %s because GrowthBook is disabled", experiment.key
)
return self._getExperimentResult(experiment, featureId=featureId)
# 2.5. If the experiment props have been overridden, merge them in
if self._overrides.get(experiment.key, None):
experiment.update(self._overrides[experiment.key])
# 3. If experiment is forced via a querystring in the url
qs = getQueryStringOverride(
experiment.key, self._url, len(experiment.variations)
)
if qs is not None:
logger.debug(
"Force variation %d from URL querystring, experiment %s",
qs,
experiment.key,
)
return self._getExperimentResult(experiment, qs, featureId=featureId)
# 4. If variation is forced in the context
if self._forcedVariations.get(experiment.key, None) is not None:
logger.debug(
"Force variation %d from GrowthBook context, experiment %s",
self._forcedVariations[experiment.key],
experiment.key,
)
return self._getExperimentResult(
experiment, self._forcedVariations[experiment.key], featureId=featureId
)
# 5. If experiment is a draft or not active, return immediately
if experiment.status == "draft" or not experiment.active:
logger.debug("Experiment %s is not active, skip", experiment.key)
return self._getExperimentResult(experiment, featureId=featureId)
# 6. Get the user hash attribute and value
(hashAttribute, hashValue) = self._getHashValue(experiment.hashAttribute, experiment.fallbackAttribute)
if not hashValue:
logger.debug(
"Skip experiment %s because user's hashAttribute value is empty",
experiment.key,
)
return self._getExperimentResult(experiment, featureId=featureId)
assigned = -1
found_sticky_bucket = False
sticky_bucket_version_is_blocked = False
if self.sticky_bucket_service and not experiment.disableStickyBucketing:
sticky_bucket = self._get_sticky_bucket_variation(
experiment.key,
experiment.bucketVersion,
experiment.minBucketVersion,
experiment.meta,
hash_attribute=experiment.hashAttribute,
fallback_attribute=experiment.fallbackAttribute,
)
found_sticky_bucket = sticky_bucket.get('variation', 0) >= 0
assigned = sticky_bucket.get('variation', 0)
sticky_bucket_version_is_blocked = sticky_bucket.get('versionIsBlocked', False)
if found_sticky_bucket:
logger.debug("Found sticky bucket for experiment %s, assigning sticky variation %s", experiment.key, assigned)
# Some checks are not needed if we already have a sticky bucket
if not found_sticky_bucket:
# 7. Filtered out / not in namespace
if experiment.filters:
if self._isFilteredOut(experiment.filters):
logger.debug(
"Skip experiment %s because of filters/namespaces", experiment.key
)
return self._getExperimentResult(experiment, featureId=featureId)
elif experiment.namespace and not inNamespace(hashValue, experiment.namespace):
logger.debug("Skip experiment %s because of namespace", experiment.key)
return self._getExperimentResult(experiment, featureId=featureId)
# 7.5. If experiment has an include property
if experiment.include:
try:
if not experiment.include():
logger.debug(
"Skip experiment %s because include() returned false",
experiment.key,
)
return self._getExperimentResult(experiment, featureId=featureId)
except Exception:
logger.warning(
"Skip experiment %s because include() raised an Exception",
experiment.key,
)
return self._getExperimentResult(experiment, featureId=featureId)
# 8. Exclude if condition is false
if experiment.condition and not evalCondition(
self._attributes, experiment.condition
):
logger.debug(
"Skip experiment %s because user failed the condition", experiment.key
)
return self._getExperimentResult(experiment, featureId=featureId)
# 8.05 Exclude if parent conditions are not met
if (experiment.parentConditions):
prereq_res = self.eval_prereqs(experiment.parentConditions, set())
if prereq_res == "gate" or prereq_res == "fail":
logger.debug("Skip experiment %s because of failing prerequisite", experiment.key)
return self._getExperimentResult(experiment, featureId=featureId)
if prereq_res == "cyclic":
logger.debug("Skip experiment %s because of cyclic prerequisite", experiment.key)
return self._getExperimentResult(experiment, featureId=featureId)
# 8.1. Make sure user is in a matching group
if experiment.groups and len(experiment.groups):
expGroups = self._groups or {}
matched = False
for group in experiment.groups:
if expGroups[group]:
matched = True
if not matched:
logger.debug(
"Skip experiment %s because user not in required group",
experiment.key,
)
return self._getExperimentResult(experiment, featureId=featureId)
# The following apply even when in a sticky bucket
# 8.2. If experiment.url is set, see if it's valid
if experiment.url:
if not self._urlIsValid(experiment.url):
logger.debug(
"Skip experiment %s because current URL is not targeted",
experiment.key,
)
return self._getExperimentResult(experiment, featureId=featureId)
# 9. Get bucket ranges and choose variation
n = gbhash(
experiment.seed or experiment.key, hashValue, experiment.hashVersion or 1
)
if n is None:
logger.warning(
"Skip experiment %s because of invalid hashVersion", experiment.key
)
return self._getExperimentResult(experiment, featureId=featureId)
if not found_sticky_bucket:
c = experiment.coverage
ranges = experiment.ranges or getBucketRanges(
len(experiment.variations), c if c is not None else 1, experiment.weights
)
assigned = chooseVariation(n, ranges)
# Unenroll if any prior sticky buckets are blocked by version
if sticky_bucket_version_is_blocked:
logger.debug("Skip experiment %s because sticky bucket version is blocked", experiment.key)
return self._getExperimentResult(experiment, featureId=featureId, stickyBucketUsed=True)
# 10. Return if not in experiment
if assigned < 0:
logger.debug(
"Skip experiment %s because user is not included in the rollout",
experiment.key,
)
return self._getExperimentResult(experiment, featureId=featureId)
# 11. If experiment is forced, return immediately
if experiment.force is not None:
logger.debug(
"Force variation %d in experiment %s", experiment.force, experiment.key
)
return self._getExperimentResult(
experiment, experiment.force, featureId=featureId
)
# 12. Exclude if in QA mode
if self._qaMode:
logger.debug("Skip experiment %s because of QA Mode", experiment.key)
return self._getExperimentResult(experiment, featureId=featureId)
# 12.5. If experiment is stopped, return immediately
if experiment.status == "stopped":
logger.debug("Skip experiment %s because it is stopped", experiment.key)
return self._getExperimentResult(experiment, featureId=featureId)
# 13. Build the result object
result = self._getExperimentResult(
experiment, assigned, True, featureId=featureId, bucket=n, stickyBucketUsed=found_sticky_bucket
)
# 13.5 Persist sticky bucket
if self.sticky_bucket_service and not experiment.disableStickyBucketing:
assignment = {}
assignment[self._get_sticky_bucket_experiment_key(
experiment.key,
experiment.bucketVersion
)] = result.key
data = self._generate_sticky_bucket_assignment_doc(
hashAttribute,
hashValue,
assignment
)
doc = data.get("doc", None)
if doc and data.get('changed', False):
if not self._sticky_bucket_assignment_docs:
self._sticky_bucket_assignment_docs = {}
self._sticky_bucket_assignment_docs[data.get('key')] = doc
self.sticky_bucket_service.save_assignments(doc)
# 14. Fire the tracking callback if set
self._track(experiment, result)
# 15. Return the result
logger.debug("Assigned variation %d in experiment %s", assigned, experiment.key)
return result
| (self, experiment: growthbook.Experiment, featureId: Optional[str] = None) -> growthbook.Result |
6,989 | growthbook | _track | null | def _track(self, experiment: Experiment, result: Result) -> None:
if not self._trackingCallback:
return None
key = (
result.hashAttribute
+ str(result.hashValue)
+ experiment.key
+ str(result.variationId)
)
if not self._tracked.get(key):
try:
self._trackingCallback(experiment=experiment, result=result)
self._tracked[key] = True
except Exception:
pass
| (self, experiment: growthbook.Experiment, result: growthbook.Result) -> NoneType |
6,990 | growthbook | _urlIsValid | null | def _urlIsValid(self, pattern) -> bool:
if not self._url:
return False
try:
r = re.compile(pattern)
if r.search(self._url):
return True
pathOnly = re.sub(r"^[^/]*/", "/", re.sub(r"^https?:\/\/", "", self._url))
if r.search(pathOnly):
return True
return False
except Exception:
return True
| (self, pattern) -> bool |
6,991 | growthbook | destroy | null | def destroy(self) -> None:
self._subscriptions.clear()
self._tracked.clear()
self._assigned.clear()
self._trackingCallback = None
self._forcedVariations.clear()
self._overrides.clear()
self._groups.clear()
self._attributes.clear()
self._features.clear()
| (self) -> NoneType |
6,992 | growthbook | evalFeature | null | def evalFeature(self, key: str) -> FeatureResult:
return self.eval_feature(key)
| (self, key: str) -> growthbook.FeatureResult |
6,993 | growthbook | eval_feature | null | def eval_feature(self, key: str) -> FeatureResult:
return self._eval_feature(key, set())
| (self, key: str) -> growthbook.FeatureResult |
6,994 | growthbook | eval_prereqs | null | def eval_prereqs(self, parentConditions: List[dict], stack: Set[str]) -> str:
for parentCondition in parentConditions:
parentRes = self._eval_feature(parentCondition.get("id", None), stack)
if parentRes.source == "cyclicPrerequisite":
return "cyclic"
if not evalCondition({'value': parentRes.value}, parentCondition.get("condition", None)):
if parentCondition.get("gate", False):
return "gate"
return "fail"
return "pass"
| (self, parentConditions: List[dict], stack: Set[str]) -> str |
6,995 | growthbook | getAllResults | null | def getAllResults(self):
return self.get_all_results()
| (self) |
6,996 | growthbook | getAttributes | null | def getAttributes(self) -> dict:
return self.get_attributes()
| (self) -> dict |
6,997 | growthbook | getFeatureValue | null | def getFeatureValue(self, key: str, fallback):
return self.get_feature_value(key, fallback)
| (self, key: str, fallback) |
6,998 | growthbook | getFeatures | null | def getFeatures(self) -> Dict[str, Feature]:
return self.get_features()
| (self) -> Dict[str, growthbook.Feature] |
6,999 | growthbook | get_all_results | null | def get_all_results(self):
return self._assigned.copy()
| (self) |
7,000 | growthbook | get_attributes | null | def get_attributes(self) -> dict:
return self._attributes
| (self) -> dict |
7,001 | growthbook | get_feature_value | null | def get_feature_value(self, key: str, fallback):
res = self.evalFeature(key)
return res.value if res.value is not None else fallback
| (self, key: str, fallback) |
7,002 | growthbook | get_features | null | def get_features(self) -> Dict[str, Feature]:
return self._features
| (self) -> Dict[str, growthbook.Feature] |
7,003 | growthbook | isOff | null | def isOff(self, key: str) -> bool:
return self.is_off(key)
| (self, key: str) -> bool |
7,004 | growthbook | isOn | null | def isOn(self, key: str) -> bool:
return self.is_on(key)
| (self, key: str) -> bool |
7,005 | growthbook | is_off | null | def is_off(self, key: str) -> bool:
return self.evalFeature(key).off
| (self, key: str) -> bool |
7,006 | growthbook | is_on | null | def is_on(self, key: str) -> bool:
return self.evalFeature(key).on
| (self, key: str) -> bool |
7,007 | growthbook | load_features | null | def load_features(self) -> None:
if not self._client_key:
raise ValueError("Must specify `client_key` to refresh features")
features = feature_repo.load_features(
self._api_host, self._client_key, self._decryption_key, self._cache_ttl
)
if features is not None:
self.setFeatures(features)
| (self) -> NoneType |
7,008 | growthbook | refresh_sticky_buckets | null | def refresh_sticky_buckets(self, force: bool = False) -> None:
if not self.sticky_bucket_service:
return
attributes = self._get_sticky_bucket_attributes()
if not force and attributes == self._sticky_bucket_attributes:
logger.debug("Skipping refresh of sticky bucket assignments, no changes")
return
self._sticky_bucket_attributes = attributes
self._sticky_bucket_assignment_docs = self.sticky_bucket_service.get_all_assignments(attributes)
| (self, force: bool = False) -> NoneType |
7,009 | growthbook | run | null | def run(self, experiment: Experiment) -> Result:
result = self._run(experiment)
self._fireSubscriptions(experiment, result)
return result
| (self, experiment: growthbook.Experiment) -> growthbook.Result |
7,010 | growthbook | setAttributes | null | def setAttributes(self, attributes: dict) -> None:
return self.set_attributes(attributes)
| (self, attributes: dict) -> NoneType |
7,011 | growthbook | setFeatures | null | def setFeatures(self, features: dict) -> None:
return self.set_features(features)
| (self, features: dict) -> NoneType |
7,012 | growthbook | set_attributes | null | def set_attributes(self, attributes: dict) -> None:
self._attributes = attributes
self.refresh_sticky_buckets()
| (self, attributes: dict) -> NoneType |
7,013 | growthbook | set_features | null | def set_features(self, features: dict) -> None:
self._features = {}
for key, feature in features.items():
if isinstance(feature, Feature):
self._features[key] = feature
else:
self._features[key] = Feature(
rules=feature.get("rules", []),
defaultValue=feature.get("defaultValue", None),
)
self.refresh_sticky_buckets()
| (self, features: dict) -> NoneType |
7,014 | growthbook | subscribe | null | def subscribe(self, callback):
self._subscriptions.add(callback)
return lambda: self._subscriptions.remove(callback)
| (self, callback) |
7,015 | growthbook | InMemoryFeatureCache | null | class InMemoryFeatureCache(AbstractFeatureCache):
def __init__(self) -> None:
self.cache: Dict[str, CacheEntry] = {}
def get(self, key: str) -> Optional[Dict]:
if key in self.cache:
entry = self.cache[key]
if entry.expires >= time():
return entry.value
return None
def set(self, key: str, value: Dict, ttl: int) -> None:
if key in self.cache:
self.cache[key].update(value)
self.cache[key] = CacheEntry(value, ttl)
def clear(self) -> None:
self.cache.clear()
| () -> None |
7,016 | growthbook | __init__ | null | def __init__(self) -> None:
self.cache: Dict[str, CacheEntry] = {}
| (self) -> NoneType |
7,017 | growthbook | clear | null | def clear(self) -> None:
self.cache.clear()
| (self) -> NoneType |
7,018 | growthbook | get | null | def get(self, key: str) -> Optional[Dict]:
if key in self.cache:
entry = self.cache[key]
if entry.expires >= time():
return entry.value
return None
| (self, key: str) -> Optional[Dict] |
7,019 | growthbook | set | null | def set(self, key: str, value: Dict, ttl: int) -> None:
if key in self.cache:
self.cache[key].update(value)
self.cache[key] = CacheEntry(value, ttl)
| (self, key: str, value: Dict, ttl: int) -> NoneType |
7,020 | growthbook | InMemoryStickyBucketService | null | class InMemoryStickyBucketService(AbstractStickyBucketService):
def __init__(self) -> None:
self.docs: Dict[str, Dict] = {}
def get_assignments(self, attributeName: str, attributeValue: str) -> Optional[Dict]:
return self.docs.get(self.get_key(attributeName, attributeValue), None)
def save_assignments(self, doc: Dict) -> None:
self.docs[self.get_key(doc["attributeName"], doc["attributeValue"])] = doc
def destroy(self) -> None:
self.docs.clear()
| () -> None |
7,021 | growthbook | __init__ | null | def __init__(self) -> None:
self.docs: Dict[str, Dict] = {}
| (self) -> NoneType |
7,022 | growthbook | destroy | null | def destroy(self) -> None:
self.docs.clear()
| (self) -> NoneType |
7,024 | growthbook | get_assignments | null | def get_assignments(self, attributeName: str, attributeValue: str) -> Optional[Dict]:
return self.docs.get(self.get_key(attributeName, attributeValue), None)
| (self, attributeName: str, attributeValue: str) -> Optional[Dict] |
7,026 | growthbook | save_assignments | null | def save_assignments(self, doc: Dict) -> None:
self.docs[self.get_key(doc["attributeName"], doc["attributeValue"])] = doc
| (self, doc: Dict) -> NoneType |
7,027 | urllib3.poolmanager | PoolManager |
Allows for arbitrary requests while transparently keeping track of
necessary connection pools for you.
:param num_pools:
Number of connection pools to cache before discarding the least
recently used pool.
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
:param \**connection_pool_kw:
Additional parameters are used to create fresh
:class:`urllib3.connectionpool.ConnectionPool` instances.
Example:
.. code-block:: python
import urllib3
http = urllib3.PoolManager(num_pools=2)
resp1 = http.request("GET", "https://google.com/")
resp2 = http.request("GET", "https://google.com/mail")
resp3 = http.request("GET", "https://yahoo.com/")
print(len(http.pools))
# 2
| class PoolManager(RequestMethods):
"""
Allows for arbitrary requests while transparently keeping track of
necessary connection pools for you.
:param num_pools:
Number of connection pools to cache before discarding the least
recently used pool.
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
:param \\**connection_pool_kw:
Additional parameters are used to create fresh
:class:`urllib3.connectionpool.ConnectionPool` instances.
Example:
.. code-block:: python
import urllib3
http = urllib3.PoolManager(num_pools=2)
resp1 = http.request("GET", "https://google.com/")
resp2 = http.request("GET", "https://google.com/mail")
resp3 = http.request("GET", "https://yahoo.com/")
print(len(http.pools))
# 2
"""
proxy: Url | None = None
proxy_config: ProxyConfig | None = None
def __init__(
self,
num_pools: int = 10,
headers: typing.Mapping[str, str] | None = None,
**connection_pool_kw: typing.Any,
) -> None:
super().__init__(headers)
self.connection_pool_kw = connection_pool_kw
self.pools: RecentlyUsedContainer[PoolKey, HTTPConnectionPool]
self.pools = RecentlyUsedContainer(num_pools)
# Locally set the pool classes and keys so other PoolManagers can
# override them.
self.pool_classes_by_scheme = pool_classes_by_scheme
self.key_fn_by_scheme = key_fn_by_scheme.copy()
def __enter__(self: _SelfT) -> _SelfT:
return self
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> Literal[False]:
self.clear()
# Return False to re-raise any potential exceptions
return False
def _new_pool(
self,
scheme: str,
host: str,
port: int,
request_context: dict[str, typing.Any] | None = None,
) -> HTTPConnectionPool:
"""
Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
any additional pool keyword arguments.
If ``request_context`` is provided, it is provided as keyword arguments
to the pool class used. This method is used to actually create the
connection pools handed out by :meth:`connection_from_url` and
companion methods. It is intended to be overridden for customization.
"""
pool_cls: type[HTTPConnectionPool] = self.pool_classes_by_scheme[scheme]
if request_context is None:
request_context = self.connection_pool_kw.copy()
# Default blocksize to _DEFAULT_BLOCKSIZE if missing or explicitly
# set to 'None' in the request_context.
if request_context.get("blocksize") is None:
request_context["blocksize"] = _DEFAULT_BLOCKSIZE
# Although the context has everything necessary to create the pool,
# this function has historically only used the scheme, host, and port
# in the positional args. When an API change is acceptable these can
# be removed.
for key in ("scheme", "host", "port"):
request_context.pop(key, None)
if scheme == "http":
for kw in SSL_KEYWORDS:
request_context.pop(kw, None)
return pool_cls(host, port, **request_context)
def clear(self) -> None:
"""
Empty our store of pools and direct them all to close.
This will not affect in-flight connections, but they will not be
re-used after completion.
"""
self.pools.clear()
def connection_from_host(
self,
host: str | None,
port: int | None = None,
scheme: str | None = "http",
pool_kwargs: dict[str, typing.Any] | None = None,
) -> HTTPConnectionPool:
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
If ``port`` isn't given, it will be derived from the ``scheme`` using
``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
provided, it is merged with the instance's ``connection_pool_kw``
variable and used to create the new connection pool, if one is
needed.
"""
if not host:
raise LocationValueError("No host specified.")
request_context = self._merge_pool_kwargs(pool_kwargs)
request_context["scheme"] = scheme or "http"
if not port:
port = port_by_scheme.get(request_context["scheme"].lower(), 80)
request_context["port"] = port
request_context["host"] = host
return self.connection_from_context(request_context)
def connection_from_context(
self, request_context: dict[str, typing.Any]
) -> HTTPConnectionPool:
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
``request_context`` must at least contain the ``scheme`` key and its
value must be a key in ``key_fn_by_scheme`` instance variable.
"""
if "strict" in request_context:
warnings.warn(
"The 'strict' parameter is no longer needed on Python 3+. "
"This will raise an error in urllib3 v2.1.0.",
DeprecationWarning,
)
request_context.pop("strict")
scheme = request_context["scheme"].lower()
pool_key_constructor = self.key_fn_by_scheme.get(scheme)
if not pool_key_constructor:
raise URLSchemeUnknown(scheme)
pool_key = pool_key_constructor(request_context)
return self.connection_from_pool_key(pool_key, request_context=request_context)
def connection_from_pool_key(
self, pool_key: PoolKey, request_context: dict[str, typing.Any]
) -> HTTPConnectionPool:
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
``pool_key`` should be a namedtuple that only contains immutable
objects. At a minimum it must have the ``scheme``, ``host``, and
``port`` fields.
"""
with self.pools.lock:
# If the scheme, host, or port doesn't match existing open
# connections, open a new ConnectionPool.
pool = self.pools.get(pool_key)
if pool:
return pool
# Make a fresh ConnectionPool of the desired type
scheme = request_context["scheme"]
host = request_context["host"]
port = request_context["port"]
pool = self._new_pool(scheme, host, port, request_context=request_context)
self.pools[pool_key] = pool
return pool
def connection_from_url(
self, url: str, pool_kwargs: dict[str, typing.Any] | None = None
) -> HTTPConnectionPool:
"""
Similar to :func:`urllib3.connectionpool.connection_from_url`.
If ``pool_kwargs`` is not provided and a new pool needs to be
constructed, ``self.connection_pool_kw`` is used to initialize
the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
is provided, it is used instead. Note that if a new pool does not
need to be created for the request, the provided ``pool_kwargs`` are
not used.
"""
u = parse_url(url)
return self.connection_from_host(
u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
)
def _merge_pool_kwargs(
self, override: dict[str, typing.Any] | None
) -> dict[str, typing.Any]:
"""
Merge a dictionary of override values for self.connection_pool_kw.
This does not modify self.connection_pool_kw and returns a new dict.
Any keys in the override dictionary with a value of ``None`` are
removed from the merged dictionary.
"""
base_pool_kwargs = self.connection_pool_kw.copy()
if override:
for key, value in override.items():
if value is None:
try:
del base_pool_kwargs[key]
except KeyError:
pass
else:
base_pool_kwargs[key] = value
return base_pool_kwargs
def _proxy_requires_url_absolute_form(self, parsed_url: Url) -> bool:
"""
Indicates if the proxy requires the complete destination URL in the
request. Normally this is only needed when not using an HTTP CONNECT
tunnel.
"""
if self.proxy is None:
return False
return not connection_requires_http_tunnel(
self.proxy, self.proxy_config, parsed_url.scheme
)
def urlopen( # type: ignore[override]
self, method: str, url: str, redirect: bool = True, **kw: typing.Any
) -> BaseHTTPResponse:
"""
Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
with custom cross-host redirect logic and only sends the request-uri
portion of the ``url``.
The given ``url`` parameter must be absolute, such that an appropriate
:class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
"""
u = parse_url(url)
if u.scheme is None:
warnings.warn(
"URLs without a scheme (ie 'https://') are deprecated and will raise an error "
"in a future version of urllib3. To avoid this DeprecationWarning ensure all URLs "
"start with 'https://' or 'http://'. Read more in this issue: "
"https://github.com/urllib3/urllib3/issues/2920",
category=DeprecationWarning,
stacklevel=2,
)
conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
kw["assert_same_host"] = False
kw["redirect"] = False
if "headers" not in kw:
kw["headers"] = self.headers
if self._proxy_requires_url_absolute_form(u):
response = conn.urlopen(method, url, **kw)
else:
response = conn.urlopen(method, u.request_uri, **kw)
redirect_location = redirect and response.get_redirect_location()
if not redirect_location:
return response
# Support relative URLs for redirecting.
redirect_location = urljoin(url, redirect_location)
if response.status == 303:
# Change the method according to RFC 9110, Section 15.4.4.
method = "GET"
# And lose the body not to transfer anything sensitive.
kw["body"] = None
kw["headers"] = HTTPHeaderDict(kw["headers"])._prepare_for_method_change()
retries = kw.get("retries")
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect)
# Strip headers marked as unsafe to forward to the redirected location.
# Check remove_headers_on_redirect to avoid a potential network call within
# conn.is_same_host() which may use socket.gethostbyname() in the future.
if retries.remove_headers_on_redirect and not conn.is_same_host(
redirect_location
):
new_headers = kw["headers"].copy()
for header in kw["headers"]:
if header.lower() in retries.remove_headers_on_redirect:
new_headers.pop(header, None)
kw["headers"] = new_headers
try:
retries = retries.increment(method, url, response=response, _pool=conn)
except MaxRetryError:
if retries.raise_on_redirect:
response.drain_conn()
raise
return response
kw["retries"] = retries
kw["redirect"] = redirect
log.info("Redirecting %s -> %s", url, redirect_location)
response.drain_conn()
return self.urlopen(method, redirect_location, **kw)
| (num_pools: 'int' = 10, headers: 'typing.Mapping[str, str] | None' = None, **connection_pool_kw: 'typing.Any') -> 'None' |
7,028 | urllib3.poolmanager | __enter__ | null | def __enter__(self: _SelfT) -> _SelfT:
return self
| (self: ~_SelfT) -> ~_SelfT |
7,029 | urllib3.poolmanager | __exit__ | null | def __exit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> Literal[False]:
self.clear()
# Return False to re-raise any potential exceptions
return False
| (self, exc_type: 'type[BaseException] | None', exc_val: 'BaseException | None', exc_tb: 'TracebackType | None') -> 'Literal[False]' |
7,030 | urllib3.poolmanager | __init__ | null | def __init__(
self,
num_pools: int = 10,
headers: typing.Mapping[str, str] | None = None,
**connection_pool_kw: typing.Any,
) -> None:
super().__init__(headers)
self.connection_pool_kw = connection_pool_kw
self.pools: RecentlyUsedContainer[PoolKey, HTTPConnectionPool]
self.pools = RecentlyUsedContainer(num_pools)
# Locally set the pool classes and keys so other PoolManagers can
# override them.
self.pool_classes_by_scheme = pool_classes_by_scheme
self.key_fn_by_scheme = key_fn_by_scheme.copy()
| (self, num_pools: int = 10, headers: Optional[Mapping[str, str]] = None, **connection_pool_kw: Any) -> NoneType |
7,031 | urllib3.poolmanager | _merge_pool_kwargs |
Merge a dictionary of override values for self.connection_pool_kw.
This does not modify self.connection_pool_kw and returns a new dict.
Any keys in the override dictionary with a value of ``None`` are
removed from the merged dictionary.
| def _merge_pool_kwargs(
self, override: dict[str, typing.Any] | None
) -> dict[str, typing.Any]:
"""
Merge a dictionary of override values for self.connection_pool_kw.
This does not modify self.connection_pool_kw and returns a new dict.
Any keys in the override dictionary with a value of ``None`` are
removed from the merged dictionary.
"""
base_pool_kwargs = self.connection_pool_kw.copy()
if override:
for key, value in override.items():
if value is None:
try:
del base_pool_kwargs[key]
except KeyError:
pass
else:
base_pool_kwargs[key] = value
return base_pool_kwargs
| (self, override: dict[str, typing.Any] | None) -> dict[str, typing.Any] |
7,032 | urllib3.poolmanager | _new_pool |
Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
any additional pool keyword arguments.
If ``request_context`` is provided, it is provided as keyword arguments
to the pool class used. This method is used to actually create the
connection pools handed out by :meth:`connection_from_url` and
companion methods. It is intended to be overridden for customization.
| def _new_pool(
self,
scheme: str,
host: str,
port: int,
request_context: dict[str, typing.Any] | None = None,
) -> HTTPConnectionPool:
"""
Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
any additional pool keyword arguments.
If ``request_context`` is provided, it is provided as keyword arguments
to the pool class used. This method is used to actually create the
connection pools handed out by :meth:`connection_from_url` and
companion methods. It is intended to be overridden for customization.
"""
pool_cls: type[HTTPConnectionPool] = self.pool_classes_by_scheme[scheme]
if request_context is None:
request_context = self.connection_pool_kw.copy()
# Default blocksize to _DEFAULT_BLOCKSIZE if missing or explicitly
# set to 'None' in the request_context.
if request_context.get("blocksize") is None:
request_context["blocksize"] = _DEFAULT_BLOCKSIZE
# Although the context has everything necessary to create the pool,
# this function has historically only used the scheme, host, and port
# in the positional args. When an API change is acceptable these can
# be removed.
for key in ("scheme", "host", "port"):
request_context.pop(key, None)
if scheme == "http":
for kw in SSL_KEYWORDS:
request_context.pop(kw, None)
return pool_cls(host, port, **request_context)
| (self, scheme: str, host: str, port: int, request_context: Optional[dict[str, Any]] = None) -> urllib3.connectionpool.HTTPConnectionPool |
7,033 | urllib3.poolmanager | _proxy_requires_url_absolute_form |
Indicates if the proxy requires the complete destination URL in the
request. Normally this is only needed when not using an HTTP CONNECT
tunnel.
| def _proxy_requires_url_absolute_form(self, parsed_url: Url) -> bool:
"""
Indicates if the proxy requires the complete destination URL in the
request. Normally this is only needed when not using an HTTP CONNECT
tunnel.
"""
if self.proxy is None:
return False
return not connection_requires_http_tunnel(
self.proxy, self.proxy_config, parsed_url.scheme
)
| (self, parsed_url: urllib3.util.url.Url) -> bool |
7,034 | urllib3.poolmanager | clear |
Empty our store of pools and direct them all to close.
This will not affect in-flight connections, but they will not be
re-used after completion.
| def clear(self) -> None:
"""
Empty our store of pools and direct them all to close.
This will not affect in-flight connections, but they will not be
re-used after completion.
"""
self.pools.clear()
| (self) -> NoneType |
7,035 | urllib3.poolmanager | connection_from_context |
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
``request_context`` must at least contain the ``scheme`` key and its
value must be a key in ``key_fn_by_scheme`` instance variable.
| def connection_from_context(
self, request_context: dict[str, typing.Any]
) -> HTTPConnectionPool:
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
``request_context`` must at least contain the ``scheme`` key and its
value must be a key in ``key_fn_by_scheme`` instance variable.
"""
if "strict" in request_context:
warnings.warn(
"The 'strict' parameter is no longer needed on Python 3+. "
"This will raise an error in urllib3 v2.1.0.",
DeprecationWarning,
)
request_context.pop("strict")
scheme = request_context["scheme"].lower()
pool_key_constructor = self.key_fn_by_scheme.get(scheme)
if not pool_key_constructor:
raise URLSchemeUnknown(scheme)
pool_key = pool_key_constructor(request_context)
return self.connection_from_pool_key(pool_key, request_context=request_context)
| (self, request_context: dict[str, typing.Any]) -> urllib3.connectionpool.HTTPConnectionPool |
7,036 | urllib3.poolmanager | connection_from_host |
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
If ``port`` isn't given, it will be derived from the ``scheme`` using
``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
provided, it is merged with the instance's ``connection_pool_kw``
variable and used to create the new connection pool, if one is
needed.
| def connection_from_host(
self,
host: str | None,
port: int | None = None,
scheme: str | None = "http",
pool_kwargs: dict[str, typing.Any] | None = None,
) -> HTTPConnectionPool:
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
If ``port`` isn't given, it will be derived from the ``scheme`` using
``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
provided, it is merged with the instance's ``connection_pool_kw``
variable and used to create the new connection pool, if one is
needed.
"""
if not host:
raise LocationValueError("No host specified.")
request_context = self._merge_pool_kwargs(pool_kwargs)
request_context["scheme"] = scheme or "http"
if not port:
port = port_by_scheme.get(request_context["scheme"].lower(), 80)
request_context["port"] = port
request_context["host"] = host
return self.connection_from_context(request_context)
| (self, host: str | None, port: Optional[int] = None, scheme: str | None = 'http', pool_kwargs: Optional[dict[str, Any]] = None) -> urllib3.connectionpool.HTTPConnectionPool |
7,037 | urllib3.poolmanager | connection_from_pool_key |
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
``pool_key`` should be a namedtuple that only contains immutable
objects. At a minimum it must have the ``scheme``, ``host``, and
``port`` fields.
| def connection_from_pool_key(
self, pool_key: PoolKey, request_context: dict[str, typing.Any]
) -> HTTPConnectionPool:
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
``pool_key`` should be a namedtuple that only contains immutable
objects. At a minimum it must have the ``scheme``, ``host``, and
``port`` fields.
"""
with self.pools.lock:
# If the scheme, host, or port doesn't match existing open
# connections, open a new ConnectionPool.
pool = self.pools.get(pool_key)
if pool:
return pool
# Make a fresh ConnectionPool of the desired type
scheme = request_context["scheme"]
host = request_context["host"]
port = request_context["port"]
pool = self._new_pool(scheme, host, port, request_context=request_context)
self.pools[pool_key] = pool
return pool
| (self, pool_key: urllib3.poolmanager.PoolKey, request_context: dict[str, typing.Any]) -> urllib3.connectionpool.HTTPConnectionPool |
7,038 | urllib3.poolmanager | connection_from_url |
Similar to :func:`urllib3.connectionpool.connection_from_url`.
If ``pool_kwargs`` is not provided and a new pool needs to be
constructed, ``self.connection_pool_kw`` is used to initialize
the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
is provided, it is used instead. Note that if a new pool does not
need to be created for the request, the provided ``pool_kwargs`` are
not used.
| def connection_from_url(
self, url: str, pool_kwargs: dict[str, typing.Any] | None = None
) -> HTTPConnectionPool:
"""
Similar to :func:`urllib3.connectionpool.connection_from_url`.
If ``pool_kwargs`` is not provided and a new pool needs to be
constructed, ``self.connection_pool_kw`` is used to initialize
the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
is provided, it is used instead. Note that if a new pool does not
need to be created for the request, the provided ``pool_kwargs`` are
not used.
"""
u = parse_url(url)
return self.connection_from_host(
u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
)
| (self, url: str, pool_kwargs: Optional[dict[str, Any]] = None) -> urllib3.connectionpool.HTTPConnectionPool |
7,039 | urllib3._request_methods | request |
Make a request using :meth:`urlopen` with the appropriate encoding of
``fields`` based on the ``method`` used.
This is a convenience method that requires the least amount of manual
effort. It can be used in most situations, while still having the
option to drop down to more specific methods when necessary, such as
:meth:`request_encode_url`, :meth:`request_encode_body`,
or even the lowest level :meth:`urlopen`.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param body:
Data to send in the request body, either :class:`str`, :class:`bytes`,
an iterable of :class:`str`/:class:`bytes`, or a file-like object.
:param fields:
Data to encode and send in the request body. Values are processed
by :func:`urllib.parse.urlencode`.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param json:
Data to encode and send as JSON with UTF-encoded in the request body.
The ``"Content-Type"`` header will be set to ``"application/json"``
unless specified otherwise.
| def request(
self,
method: str,
url: str,
body: _TYPE_BODY | None = None,
fields: _TYPE_FIELDS | None = None,
headers: typing.Mapping[str, str] | None = None,
json: typing.Any | None = None,
**urlopen_kw: typing.Any,
) -> BaseHTTPResponse:
"""
Make a request using :meth:`urlopen` with the appropriate encoding of
``fields`` based on the ``method`` used.
This is a convenience method that requires the least amount of manual
effort. It can be used in most situations, while still having the
option to drop down to more specific methods when necessary, such as
:meth:`request_encode_url`, :meth:`request_encode_body`,
or even the lowest level :meth:`urlopen`.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param body:
Data to send in the request body, either :class:`str`, :class:`bytes`,
an iterable of :class:`str`/:class:`bytes`, or a file-like object.
:param fields:
Data to encode and send in the request body. Values are processed
by :func:`urllib.parse.urlencode`.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param json:
Data to encode and send as JSON with UTF-encoded in the request body.
The ``"Content-Type"`` header will be set to ``"application/json"``
unless specified otherwise.
"""
method = method.upper()
if json is not None and body is not None:
raise TypeError(
"request got values for both 'body' and 'json' parameters which are mutually exclusive"
)
if json is not None:
if headers is None:
headers = self.headers
if not ("content-type" in map(str.lower, headers.keys())):
headers = HTTPHeaderDict(headers)
headers["Content-Type"] = "application/json"
body = _json.dumps(json, separators=(",", ":"), ensure_ascii=False).encode(
"utf-8"
)
if body is not None:
urlopen_kw["body"] = body
if method in self._encode_url_methods:
return self.request_encode_url(
method,
url,
fields=fields, # type: ignore[arg-type]
headers=headers,
**urlopen_kw,
)
else:
return self.request_encode_body(
method, url, fields=fields, headers=headers, **urlopen_kw
)
| (self, method: str, url: str, body: Union[bytes, IO[Any], Iterable[bytes], str, NoneType] = None, fields: Union[Sequence[Union[Tuple[str, Union[str, bytes, Tuple[str, Union[str, bytes]], Tuple[str, Union[str, bytes], str]]], urllib3.fields.RequestField]], Mapping[str, Union[str, bytes, Tuple[str, Union[str, bytes]], Tuple[str, Union[str, bytes], str]]], NoneType] = None, headers: Optional[Mapping[str, str]] = None, json: Optional[Any] = None, **urlopen_kw: Any) -> urllib3.response.BaseHTTPResponse |
7,040 | urllib3._request_methods | request_encode_body |
Make a request using :meth:`urlopen` with the ``fields`` encoded in
the body. This is useful for request methods like POST, PUT, PATCH, etc.
When ``encode_multipart=True`` (default), then
:func:`urllib3.encode_multipart_formdata` is used to encode
the payload with the appropriate content type. Otherwise
:func:`urllib.parse.urlencode` is used with the
'application/x-www-form-urlencoded' content type.
Multipart encoding must be used when posting files, and it's reasonably
safe to use it in other times too. However, it may break request
signing, such as with OAuth.
Supports an optional ``fields`` parameter of key/value strings AND
key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
the MIME type is optional. For example::
fields = {
'foo': 'bar',
'fakefile': ('foofile.txt', 'contents of foofile'),
'realfile': ('barfile.txt', open('realfile').read()),
'typedfile': ('bazfile.bin', open('bazfile').read(),
'image/jpeg'),
'nonamefile': 'contents of nonamefile field',
}
When uploading a file, providing a filename (the first parameter of the
tuple) is optional but recommended to best mimic behavior of browsers.
Note that if ``headers`` are supplied, the 'Content-Type' header will
be overwritten because it depends on the dynamic random boundary string
which is used to compose the body of the request. The random boundary
string can be explicitly set with the ``multipart_boundary`` parameter.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param fields:
Data to encode and send in the request body.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param encode_multipart:
If True, encode the ``fields`` using the multipart/form-data MIME
format.
:param multipart_boundary:
If not specified, then a random boundary will be generated using
:func:`urllib3.filepost.choose_boundary`.
| def request_encode_body(
self,
method: str,
url: str,
fields: _TYPE_FIELDS | None = None,
headers: typing.Mapping[str, str] | None = None,
encode_multipart: bool = True,
multipart_boundary: str | None = None,
**urlopen_kw: str,
) -> BaseHTTPResponse:
"""
Make a request using :meth:`urlopen` with the ``fields`` encoded in
the body. This is useful for request methods like POST, PUT, PATCH, etc.
When ``encode_multipart=True`` (default), then
:func:`urllib3.encode_multipart_formdata` is used to encode
the payload with the appropriate content type. Otherwise
:func:`urllib.parse.urlencode` is used with the
'application/x-www-form-urlencoded' content type.
Multipart encoding must be used when posting files, and it's reasonably
safe to use it in other times too. However, it may break request
signing, such as with OAuth.
Supports an optional ``fields`` parameter of key/value strings AND
key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
the MIME type is optional. For example::
fields = {
'foo': 'bar',
'fakefile': ('foofile.txt', 'contents of foofile'),
'realfile': ('barfile.txt', open('realfile').read()),
'typedfile': ('bazfile.bin', open('bazfile').read(),
'image/jpeg'),
'nonamefile': 'contents of nonamefile field',
}
When uploading a file, providing a filename (the first parameter of the
tuple) is optional but recommended to best mimic behavior of browsers.
Note that if ``headers`` are supplied, the 'Content-Type' header will
be overwritten because it depends on the dynamic random boundary string
which is used to compose the body of the request. The random boundary
string can be explicitly set with the ``multipart_boundary`` parameter.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param fields:
Data to encode and send in the request body.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param encode_multipart:
If True, encode the ``fields`` using the multipart/form-data MIME
format.
:param multipart_boundary:
If not specified, then a random boundary will be generated using
:func:`urllib3.filepost.choose_boundary`.
"""
if headers is None:
headers = self.headers
extra_kw: dict[str, typing.Any] = {"headers": HTTPHeaderDict(headers)}
body: bytes | str
if fields:
if "body" in urlopen_kw:
raise TypeError(
"request got values for both 'fields' and 'body', can only specify one."
)
if encode_multipart:
body, content_type = encode_multipart_formdata(
fields, boundary=multipart_boundary
)
else:
body, content_type = (
urlencode(fields), # type: ignore[arg-type]
"application/x-www-form-urlencoded",
)
extra_kw["body"] = body
extra_kw["headers"].setdefault("Content-Type", content_type)
extra_kw.update(urlopen_kw)
return self.urlopen(method, url, **extra_kw)
| (self, method: str, url: str, fields: Union[Sequence[Union[Tuple[str, Union[str, bytes, Tuple[str, Union[str, bytes]], Tuple[str, Union[str, bytes], str]]], urllib3.fields.RequestField]], Mapping[str, Union[str, bytes, Tuple[str, Union[str, bytes]], Tuple[str, Union[str, bytes], str]]], NoneType] = None, headers: Optional[Mapping[str, str]] = None, encode_multipart: bool = True, multipart_boundary: Optional[str] = None, **urlopen_kw: str) -> urllib3.response.BaseHTTPResponse |
7,041 | urllib3._request_methods | request_encode_url |
Make a request using :meth:`urlopen` with the ``fields`` encoded in
the url. This is useful for request methods like GET, HEAD, DELETE, etc.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param fields:
Data to encode and send in the request body.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
| def request_encode_url(
self,
method: str,
url: str,
fields: _TYPE_ENCODE_URL_FIELDS | None = None,
headers: typing.Mapping[str, str] | None = None,
**urlopen_kw: str,
) -> BaseHTTPResponse:
"""
Make a request using :meth:`urlopen` with the ``fields`` encoded in
the url. This is useful for request methods like GET, HEAD, DELETE, etc.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param fields:
Data to encode and send in the request body.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
"""
if headers is None:
headers = self.headers
extra_kw: dict[str, typing.Any] = {"headers": headers}
extra_kw.update(urlopen_kw)
if fields:
url += "?" + urlencode(fields)
return self.urlopen(method, url, **extra_kw)
| (self, method: str, url: str, fields: Union[Sequence[Tuple[str, Union[str, bytes]]], Mapping[str, Union[str, bytes]], NoneType] = None, headers: Optional[Mapping[str, str]] = None, **urlopen_kw: str) -> urllib3.response.BaseHTTPResponse |
7,042 | urllib3.poolmanager | urlopen |
Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
with custom cross-host redirect logic and only sends the request-uri
portion of the ``url``.
The given ``url`` parameter must be absolute, such that an appropriate
:class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
| def urlopen( # type: ignore[override]
self, method: str, url: str, redirect: bool = True, **kw: typing.Any
) -> BaseHTTPResponse:
"""
Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
with custom cross-host redirect logic and only sends the request-uri
portion of the ``url``.
The given ``url`` parameter must be absolute, such that an appropriate
:class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
"""
u = parse_url(url)
if u.scheme is None:
warnings.warn(
"URLs without a scheme (ie 'https://') are deprecated and will raise an error "
"in a future version of urllib3. To avoid this DeprecationWarning ensure all URLs "
"start with 'https://' or 'http://'. Read more in this issue: "
"https://github.com/urllib3/urllib3/issues/2920",
category=DeprecationWarning,
stacklevel=2,
)
conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
kw["assert_same_host"] = False
kw["redirect"] = False
if "headers" not in kw:
kw["headers"] = self.headers
if self._proxy_requires_url_absolute_form(u):
response = conn.urlopen(method, url, **kw)
else:
response = conn.urlopen(method, u.request_uri, **kw)
redirect_location = redirect and response.get_redirect_location()
if not redirect_location:
return response
# Support relative URLs for redirecting.
redirect_location = urljoin(url, redirect_location)
if response.status == 303:
# Change the method according to RFC 9110, Section 15.4.4.
method = "GET"
# And lose the body not to transfer anything sensitive.
kw["body"] = None
kw["headers"] = HTTPHeaderDict(kw["headers"])._prepare_for_method_change()
retries = kw.get("retries")
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect)
# Strip headers marked as unsafe to forward to the redirected location.
# Check remove_headers_on_redirect to avoid a potential network call within
# conn.is_same_host() which may use socket.gethostbyname() in the future.
if retries.remove_headers_on_redirect and not conn.is_same_host(
redirect_location
):
new_headers = kw["headers"].copy()
for header in kw["headers"]:
if header.lower() in retries.remove_headers_on_redirect:
new_headers.pop(header, None)
kw["headers"] = new_headers
try:
retries = retries.increment(method, url, response=response, _pool=conn)
except MaxRetryError:
if retries.raise_on_redirect:
response.drain_conn()
raise
return response
kw["retries"] = retries
kw["redirect"] = redirect
log.info("Redirecting %s -> %s", url, redirect_location)
response.drain_conn()
return self.urlopen(method, redirect_location, **kw)
| (self, method: str, url: str, redirect: bool = True, **kw: Any) -> urllib3.response.BaseHTTPResponse |
7,043 | growthbook | Result | null | class Result(object):
def __init__(
self,
variationId: int,
inExperiment: bool,
value,
hashUsed: bool,
hashAttribute: str,
hashValue: str,
featureId: Optional[str],
meta: VariationMeta = None,
bucket: float = None,
stickyBucketUsed: bool = False,
) -> None:
self.variationId = variationId
self.inExperiment = inExperiment
self.value = value
self.hashUsed = hashUsed
self.hashAttribute = hashAttribute
self.hashValue = hashValue
self.featureId = featureId or None
self.bucket = bucket
self.stickyBucketUsed = stickyBucketUsed
self.key = str(variationId)
self.name = ""
self.passthrough = False
if meta:
if "name" in meta:
self.name = meta["name"]
if "key" in meta:
self.key = meta["key"]
if "passthrough" in meta:
self.passthrough = meta["passthrough"]
def to_dict(self) -> dict:
obj = {
"featureId": self.featureId,
"variationId": self.variationId,
"inExperiment": self.inExperiment,
"value": self.value,
"hashUsed": self.hashUsed,
"hashAttribute": self.hashAttribute,
"hashValue": self.hashValue,
"key": self.key,
"stickyBucketUsed": self.stickyBucketUsed,
}
if self.bucket is not None:
obj["bucket"] = self.bucket
if self.name:
obj["name"] = self.name
if self.passthrough:
obj["passthrough"] = True
return obj
| (variationId: int, inExperiment: bool, value, hashUsed: bool, hashAttribute: str, hashValue: str, featureId: Optional[str], meta: growthbook.VariationMeta = None, bucket: float = None, stickyBucketUsed: bool = False) -> None |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.