file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–12.1k) | suffix (large_string, lengths 0–12k) | middle (large_string, lengths 0–7.51k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
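Each row below is one fill-in-the-middle (FIM) sample: a source file split around a masked span, with the split point shown as `|` between the `prefix` and `suffix` cells, the masked text carried in the `middle` cell, and `fim_type` naming one of the four mask classes visible in the data (`identifier_name`, `identifier_body`, `conditional_block`, `random_line_split`). As a minimal sketch, assuming each row is exposed as a dict keyed by the column names above (the `reassemble` helper is illustrative, not part of the dataset):

```python
# Sketch: rebuild the original file text from one FIM row.
# Assumes a row is a dict keyed by the column names in the header above.
def reassemble(row: dict) -> str:
    # The masked span slots back between the prefix and the suffix.
    return row["prefix"] + row["middle"] + row["suffix"]

# Worked example taken from the first row below, where the prefix ends
# in "co", the middle cell holds "nstructor(p", and the suffix resumes
# at "rivate $http":
row = {
    "file_name": "itSystemService.ts",
    "prefix": "co",
    "middle": "nstructor(p",
    "suffix": "rivate $http",
    "fim_type": "identifier_name",
}
assert reassemble(row) == "constructor(private $http"
```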
itSystemService.ts | module Kitos.Services {
"use strict";
interface ISystemRoleModel {
Id: number;
HasReadAccess: boolean;
HasWriteAccess: boolean;
Name: string;
IsActive: boolean;
Description?: any;
ObjectOwnerId: number;
LastChanged: Date;
LastChangedByUserId: number;
}
interface ISystemRightsModel {
Id: number;
UserId: number;
RoleId: number;
ObjectId: number;
ObjectOwnerId: number;
LastChanged: Date;
LastChangedByUserId: number;
}
export class ItSystemService {
public static $inject: string[] = ["$http"];
co | rivate $http: IHttpServiceWithCustomConfig) {
}
GetSystemById = (id: number) => {
return this.$http.get<Models.ItSystem.IItSystem>(`odata/ItSystems(${id})`);
}
GetAllSystems = () => {
return this.$http.get<Models.ItSystem.IItSystem>(`odata/ItSystems`);
}
GetSystemRoleById = (roleId: number) => {
return this.$http.get<ISystemRoleModel>(`odata/ItSystemRoles(${roleId})`);
}
GetAllSystemRoles = () => {
return this.$http.get<ISystemRoleModel>(`odata/ItSystemRoles`);
}
GetSystemRightsById = (id: number) => {
return this.$http.get<ISystemRightsModel>(`odata/ItSystemRights?$filter=UserId eq (${id})`);
}
GetSystemDataById = (id: number) => {
return this.$http.get(`odata/ItSystemRights?$expand=role,object&$filter=UserId eq (${id})`);
}
GetSystemDataByIdFiltered = (id: number, orgId: number) => {
return this.$http
.get(`odata/ItSystemRights?$expand=role($select=Name),object($select=Id;$expand=ItSystem($select=Id,Name))&$filter=Object/OrganizationId eq (${orgId}) AND UserId eq (${id})&$select=Id`);
}
}
app.service("ItSystemService", ItSystemService);
}
| nstructor(p | identifier_name |
http.py | import logging
import ssl
from typing import List # pylint: disable=unused-import
import aiohttp
import certifi
import trio_asyncio
from aiohttp.http_exceptions import HttpProcessingError
from .base import BufferedFree, Limit, Sink, Source
logger = logging.getLogger(__name__)
class AiohttpClientSessionMixin:
def init_client(self, client, headers={}):
ssl_context = ssl.create_default_context(cafile=certifi.where())
conn = aiohttp.TCPConnector(ssl=ssl_context)
if client:
self.client_owned, self.client = False, client
else:
self.client_owned, self.client = True, aiohttp.ClientSession(
connector=conn,
headers=headers,
skip_auto_headers=["Content-Type", "User-Agent"],
)
async def close_client(self):
if self.client_owned and not self.client.closed:
await self.client.close()
DEFAULT_CHUNK_SIZE = 1024 * 10 * 16
class URLReader(Source, AiohttpClientSessionMixin):
def __init__(self, url, client=None):
super(URLReader, self).__init__()
self.url = url
self.response = None
self.init_client(client)
@trio_asyncio.aio_as_trio
async def read(self, count=-1):
if self._eof:
return b""
if self.response is None:
self.response = await self.client.get(self.url)
self.response.raise_for_status()
if count == -1:
|
buf = await self.response.content.read(count)
if len(buf) == 0:
await self._close()
return buf
async def _close(self):
self._eof = True
if not self.response is None:
await self.response.release()
self.response = None
await self.close_client()
@trio_asyncio.aio_as_trio
async def close(self):
await self._close()
class URLWriter(Sink, AiohttpClientSessionMixin):
def __init__(self, url, size=None, client=None):
super(URLWriter, self).__init__()
self.url = url
self._done = False
self.response = None
self.bytes_written = 0
self.size = size
self.etag = None
self.init_client(client)
@trio_asyncio.aio_as_trio
async def read(self, count=-1):
if self._done:
return b""
if self.response is None:
@trio_asyncio.trio_as_aio
async def read_from_input():
assert self.input is not None
return (await self.input.read())
async def feed_http_upload():
while True:
buf = await read_from_input()
if len(buf) == 0:
break
yield buf
self.bytes_written += len(buf)
logger.debug('HTTP PUT %s', self.url)
self.response = await self.client.put(
self.url,
data=feed_http_upload(),
raise_for_status=True,
headers={} if self.size is None else {"Content-Length": str(self.size)},
)
content = await self.response.read()
await self.response.release()
if not self.response.status in (200, 201, 202):
raise HttpProcessingError(
code=self.response.status,
message=self.response.reason,
headers=self.response.headers,
)
self._done = True
if "ETAG" in self.response.headers:
self.etag = self.response.headers["ETAG"][1:-1]
return content
@trio_asyncio.aio_as_trio
async def close(self):
self._done = True
if not self.response is None:
await self.response.release()
self.response = None
await self.close_client()
class ChunkedURLWriter(Sink, AiohttpClientSessionMixin):
"""
The ChunkedURLWriter will instantiate an URLWriter for each URL given to
it.
"""
def __init__(self, urls, chunksize, total_size=None, client=None):
super(ChunkedURLWriter, self).__init__()
self._urls = urls
self._chunksize = chunksize
self._url_idx = 0
self.init_client(client)
self.bytes_written = 0
self.total_size = total_size
self.etags = [] # type: List[str]
def add_input(self, input):
self.input = input >> BufferedFree()
async def read(self, count=-1):
assert self.input is not None
if self._url_idx >= len(self._urls):
return b""
url = self._urls[self._url_idx]
logger.debug("Uploading to: %s (max. %d bytes)", url, self._chunksize)
size = (
None
if self.total_size is None
else min(self.total_size - self.bytes_written, self._chunksize)
)
writer = (
self.input
>> Limit(self._chunksize)
>> URLWriter(url, size=size, client=self.client)
)
result = await writer.readall()
self.etags.append(writer.etag)
self.bytes_written += writer.bytes_written
self._url_idx += 1
return result or b"<empty response>"
@trio_asyncio.aio_as_trio
async def close(self):
await self.close_client()
| count = DEFAULT_CHUNK_SIZE | conditional_block |
http.py | import logging
import ssl
from typing import List # pylint: disable=unused-import
import aiohttp
import certifi
import trio_asyncio
from aiohttp.http_exceptions import HttpProcessingError
from .base import BufferedFree, Limit, Sink, Source
logger = logging.getLogger(__name__)
class AiohttpClientSessionMixin:
def init_client(self, client, headers={}):
ssl_context = ssl.create_default_context(cafile=certifi.where())
conn = aiohttp.TCPConnector(ssl=ssl_context)
if client:
self.client_owned, self.client = False, client
else:
self.client_owned, self.client = True, aiohttp.ClientSession(
connector=conn,
headers=headers,
skip_auto_headers=["Content-Type", "User-Agent"],
)
async def close_client(self):
if self.client_owned and not self.client.closed:
await self.client.close()
DEFAULT_CHUNK_SIZE = 1024 * 10 * 16
class URLReader(Source, AiohttpClientSessionMixin):
def __init__(self, url, client=None):
super(URLReader, self).__init__()
self.url = url
self.response = None
self.init_client(client)
@trio_asyncio.aio_as_trio
async def | (self, count=-1):
if self._eof:
return b""
if self.response is None:
self.response = await self.client.get(self.url)
self.response.raise_for_status()
if count == -1:
count = DEFAULT_CHUNK_SIZE
buf = await self.response.content.read(count)
if len(buf) == 0:
await self._close()
return buf
async def _close(self):
self._eof = True
if not self.response is None:
await self.response.release()
self.response = None
await self.close_client()
@trio_asyncio.aio_as_trio
async def close(self):
await self._close()
class URLWriter(Sink, AiohttpClientSessionMixin):
def __init__(self, url, size=None, client=None):
super(URLWriter, self).__init__()
self.url = url
self._done = False
self.response = None
self.bytes_written = 0
self.size = size
self.etag = None
self.init_client(client)
@trio_asyncio.aio_as_trio
async def read(self, count=-1):
if self._done:
return b""
if self.response is None:
@trio_asyncio.trio_as_aio
async def read_from_input():
assert self.input is not None
return (await self.input.read())
async def feed_http_upload():
while True:
buf = await read_from_input()
if len(buf) == 0:
break
yield buf
self.bytes_written += len(buf)
logger.debug('HTTP PUT %s', self.url)
self.response = await self.client.put(
self.url,
data=feed_http_upload(),
raise_for_status=True,
headers={} if self.size is None else {"Content-Length": str(self.size)},
)
content = await self.response.read()
await self.response.release()
if not self.response.status in (200, 201, 202):
raise HttpProcessingError(
code=self.response.status,
message=self.response.reason,
headers=self.response.headers,
)
self._done = True
if "ETAG" in self.response.headers:
self.etag = self.response.headers["ETAG"][1:-1]
return content
@trio_asyncio.aio_as_trio
async def close(self):
self._done = True
if not self.response is None:
await self.response.release()
self.response = None
await self.close_client()
class ChunkedURLWriter(Sink, AiohttpClientSessionMixin):
"""
The ChunkedURLWriter will instantiate an URLWriter for each URL given to
it.
"""
def __init__(self, urls, chunksize, total_size=None, client=None):
super(ChunkedURLWriter, self).__init__()
self._urls = urls
self._chunksize = chunksize
self._url_idx = 0
self.init_client(client)
self.bytes_written = 0
self.total_size = total_size
self.etags = [] # type: List[str]
def add_input(self, input):
self.input = input >> BufferedFree()
async def read(self, count=-1):
assert self.input is not None
if self._url_idx >= len(self._urls):
return b""
url = self._urls[self._url_idx]
logger.debug("Uploading to: %s (max. %d bytes)", url, self._chunksize)
size = (
None
if self.total_size is None
else min(self.total_size - self.bytes_written, self._chunksize)
)
writer = (
self.input
>> Limit(self._chunksize)
>> URLWriter(url, size=size, client=self.client)
)
result = await writer.readall()
self.etags.append(writer.etag)
self.bytes_written += writer.bytes_written
self._url_idx += 1
return result or b"<empty response>"
@trio_asyncio.aio_as_trio
async def close(self):
await self.close_client()
| read | identifier_name |
http.py | import logging
import ssl
from typing import List # pylint: disable=unused-import
import aiohttp
import certifi
import trio_asyncio
from aiohttp.http_exceptions import HttpProcessingError
from .base import BufferedFree, Limit, Sink, Source
logger = logging.getLogger(__name__)
class AiohttpClientSessionMixin:
def init_client(self, client, headers={}):
ssl_context = ssl.create_default_context(cafile=certifi.where())
conn = aiohttp.TCPConnector(ssl=ssl_context)
if client:
self.client_owned, self.client = False, client
else:
self.client_owned, self.client = True, aiohttp.ClientSession(
connector=conn,
headers=headers,
skip_auto_headers=["Content-Type", "User-Agent"],
)
async def close_client(self):
if self.client_owned and not self.client.closed:
await self.client.close()
DEFAULT_CHUNK_SIZE = 1024 * 10 * 16
class URLReader(Source, AiohttpClientSessionMixin):
def __init__(self, url, client=None):
super(URLReader, self).__init__()
self.url = url
self.response = None
self.init_client(client)
@trio_asyncio.aio_as_trio
async def read(self, count=-1):
if self._eof:
return b""
if self.response is None:
self.response = await self.client.get(self.url)
self.response.raise_for_status()
if count == -1:
count = DEFAULT_CHUNK_SIZE
buf = await self.response.content.read(count)
if len(buf) == 0:
await self._close()
return buf
async def _close(self):
self._eof = True
if not self.response is None:
await self.response.release()
self.response = None
await self.close_client()
@trio_asyncio.aio_as_trio
async def close(self):
await self._close()
class URLWriter(Sink, AiohttpClientSessionMixin):
def __init__(self, url, size=None, client=None):
super(URLWriter, self).__init__()
self.url = url
self._done = False
self.response = None
self.bytes_written = 0
self.size = size
self.etag = None
self.init_client(client)
@trio_asyncio.aio_as_trio
async def read(self, count=-1):
if self._done:
return b""
if self.response is None:
@trio_asyncio.trio_as_aio
async def read_from_input():
assert self.input is not None
return (await self.input.read())
async def feed_http_upload():
while True:
buf = await read_from_input()
if len(buf) == 0:
break
yield buf
self.bytes_written += len(buf)
logger.debug('HTTP PUT %s', self.url)
self.response = await self.client.put(
self.url,
data=feed_http_upload(),
raise_for_status=True,
headers={} if self.size is None else {"Content-Length": str(self.size)},
)
content = await self.response.read()
await self.response.release()
if not self.response.status in (200, 201, 202):
raise HttpProcessingError(
code=self.response.status,
message=self.response.reason,
headers=self.response.headers,
)
self._done = True
if "ETAG" in self.response.headers:
self.etag = self.response.headers["ETAG"][1:-1]
return content
@trio_asyncio.aio_as_trio
async def close(self):
self._done = True
if not self.response is None:
await self.response.release()
self.response = None
await self.close_client()
class ChunkedURLWriter(Sink, AiohttpClientSessionMixin):
"""
The ChunkedURLWriter will instantiate an URLWriter for each URL given to
it.
"""
def __init__(self, urls, chunksize, total_size=None, client=None):
super(ChunkedURLWriter, self).__init__()
self._urls = urls
self._chunksize = chunksize
self._url_idx = 0
self.init_client(client)
self.bytes_written = 0
self.total_size = total_size
self.etags = [] # type: List[str]
def add_input(self, input):
self.input = input >> BufferedFree()
| url = self._urls[self._url_idx]
logger.debug("Uploading to: %s (max. %d bytes)", url, self._chunksize)
size = (
None
if self.total_size is None
else min(self.total_size - self.bytes_written, self._chunksize)
)
writer = (
self.input
>> Limit(self._chunksize)
>> URLWriter(url, size=size, client=self.client)
)
result = await writer.readall()
self.etags.append(writer.etag)
self.bytes_written += writer.bytes_written
self._url_idx += 1
return result or b"<empty response>"
@trio_asyncio.aio_as_trio
async def close(self):
await self.close_client() | async def read(self, count=-1):
assert self.input is not None
if self._url_idx >= len(self._urls):
return b"" | random_line_split |
http.py | import logging
import ssl
from typing import List # pylint: disable=unused-import
import aiohttp
import certifi
import trio_asyncio
from aiohttp.http_exceptions import HttpProcessingError
from .base import BufferedFree, Limit, Sink, Source
logger = logging.getLogger(__name__)
class AiohttpClientSessionMixin:
def init_client(self, client, headers={}):
ssl_context = ssl.create_default_context(cafile=certifi.where())
conn = aiohttp.TCPConnector(ssl=ssl_context)
if client:
self.client_owned, self.client = False, client
else:
self.client_owned, self.client = True, aiohttp.ClientSession(
connector=conn,
headers=headers,
skip_auto_headers=["Content-Type", "User-Agent"],
)
async def close_client(self):
if self.client_owned and not self.client.closed:
await self.client.close()
DEFAULT_CHUNK_SIZE = 1024 * 10 * 16
class URLReader(Source, AiohttpClientSessionMixin):
def __init__(self, url, client=None):
super(URLReader, self).__init__()
self.url = url
self.response = None
self.init_client(client)
@trio_asyncio.aio_as_trio
async def read(self, count=-1):
if self._eof:
return b""
if self.response is None:
self.response = await self.client.get(self.url)
self.response.raise_for_status()
if count == -1:
count = DEFAULT_CHUNK_SIZE
buf = await self.response.content.read(count)
if len(buf) == 0:
await self._close()
return buf
async def _close(self):
self._eof = True
if not self.response is None:
await self.response.release()
self.response = None
await self.close_client()
@trio_asyncio.aio_as_trio
async def close(self):
await self._close()
class URLWriter(Sink, AiohttpClientSessionMixin):
def __init__(self, url, size=None, client=None):
super(URLWriter, self).__init__()
self.url = url
self._done = False
self.response = None
self.bytes_written = 0
self.size = size
self.etag = None
self.init_client(client)
@trio_asyncio.aio_as_trio
async def read(self, count=-1):
if self._done:
return b""
if self.response is None:
@trio_asyncio.trio_as_aio
async def read_from_input():
|
async def feed_http_upload():
while True:
buf = await read_from_input()
if len(buf) == 0:
break
yield buf
self.bytes_written += len(buf)
logger.debug('HTTP PUT %s', self.url)
self.response = await self.client.put(
self.url,
data=feed_http_upload(),
raise_for_status=True,
headers={} if self.size is None else {"Content-Length": str(self.size)},
)
content = await self.response.read()
await self.response.release()
if not self.response.status in (200, 201, 202):
raise HttpProcessingError(
code=self.response.status,
message=self.response.reason,
headers=self.response.headers,
)
self._done = True
if "ETAG" in self.response.headers:
self.etag = self.response.headers["ETAG"][1:-1]
return content
@trio_asyncio.aio_as_trio
async def close(self):
self._done = True
if not self.response is None:
await self.response.release()
self.response = None
await self.close_client()
class ChunkedURLWriter(Sink, AiohttpClientSessionMixin):
"""
The ChunkedURLWriter will instantiate an URLWriter for each URL given to
it.
"""
def __init__(self, urls, chunksize, total_size=None, client=None):
super(ChunkedURLWriter, self).__init__()
self._urls = urls
self._chunksize = chunksize
self._url_idx = 0
self.init_client(client)
self.bytes_written = 0
self.total_size = total_size
self.etags = [] # type: List[str]
def add_input(self, input):
self.input = input >> BufferedFree()
async def read(self, count=-1):
assert self.input is not None
if self._url_idx >= len(self._urls):
return b""
url = self._urls[self._url_idx]
logger.debug("Uploading to: %s (max. %d bytes)", url, self._chunksize)
size = (
None
if self.total_size is None
else min(self.total_size - self.bytes_written, self._chunksize)
)
writer = (
self.input
>> Limit(self._chunksize)
>> URLWriter(url, size=size, client=self.client)
)
result = await writer.readall()
self.etags.append(writer.etag)
self.bytes_written += writer.bytes_written
self._url_idx += 1
return result or b"<empty response>"
@trio_asyncio.aio_as_trio
async def close(self):
await self.close_client()
| assert self.input is not None
return (await self.input.read()) | identifier_body |
terminalEditorSerializer.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { URI } from 'vs/base/common/uri';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { IEditorSerializer } from 'vs/workbench/common/editor';
import { EditorInput } from 'vs/workbench/common/editor/editorInput';
import { ISerializedTerminalEditorInput, ITerminalEditorService, ITerminalInstance } from 'vs/workbench/contrib/terminal/browser/terminal';
import { TerminalEditorInput } from 'vs/workbench/contrib/terminal/browser/terminalEditorInput';
export class TerminalInputSerializer implements IEditorSerializer {
constructor(
@ITerminalEditorService private readonly _terminalEditorService: ITerminalEditorService
) { }
public canSerialize(editorInput: TerminalEditorInput): boolean {
return !!editorInput.terminalInstance?.persistentProcessId;
}
public | (editorInput: TerminalEditorInput): string | undefined {
if (!editorInput.terminalInstance?.persistentProcessId || !editorInput.terminalInstance.shouldPersist) {
return;
}
const term = JSON.stringify(this._toJson(editorInput.terminalInstance));
return term;
}
public deserialize(instantiationService: IInstantiationService, serializedEditorInput: string): EditorInput | undefined {
const terminalInstance = JSON.parse(serializedEditorInput);
terminalInstance.resource = URI.parse(terminalInstance.resource);
return this._terminalEditorService.reviveInput(terminalInstance);
}
private _toJson(instance: ITerminalInstance): ISerializedTerminalEditorInput {
return {
id: instance.persistentProcessId!,
pid: instance.processId || 0,
title: instance.title,
titleSource: instance.titleSource,
cwd: '',
icon: instance.icon,
color: instance.color,
resource: instance.resource.toString(),
hasChildProcesses: instance.hasChildProcesses
};
}
}
| serialize | identifier_name |
terminalEditorSerializer.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { URI } from 'vs/base/common/uri';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { IEditorSerializer } from 'vs/workbench/common/editor';
import { EditorInput } from 'vs/workbench/common/editor/editorInput';
import { ISerializedTerminalEditorInput, ITerminalEditorService, ITerminalInstance } from 'vs/workbench/contrib/terminal/browser/terminal';
import { TerminalEditorInput } from 'vs/workbench/contrib/terminal/browser/terminalEditorInput';
export class TerminalInputSerializer implements IEditorSerializer {
constructor(
@ITerminalEditorService private readonly _terminalEditorService: ITerminalEditorService
) { }
public canSerialize(editorInput: TerminalEditorInput): boolean {
return !!editorInput.terminalInstance?.persistentProcessId;
}
public serialize(editorInput: TerminalEditorInput): string | undefined {
if (!editorInput.terminalInstance?.persistentProcessId || !editorInput.terminalInstance.shouldPersist) |
const term = JSON.stringify(this._toJson(editorInput.terminalInstance));
return term;
}
public deserialize(instantiationService: IInstantiationService, serializedEditorInput: string): EditorInput | undefined {
const terminalInstance = JSON.parse(serializedEditorInput);
terminalInstance.resource = URI.parse(terminalInstance.resource);
return this._terminalEditorService.reviveInput(terminalInstance);
}
private _toJson(instance: ITerminalInstance): ISerializedTerminalEditorInput {
return {
id: instance.persistentProcessId!,
pid: instance.processId || 0,
title: instance.title,
titleSource: instance.titleSource,
cwd: '',
icon: instance.icon,
color: instance.color,
resource: instance.resource.toString(),
hasChildProcesses: instance.hasChildProcesses
};
}
}
| {
return;
} | conditional_block |
terminalEditorSerializer.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { URI } from 'vs/base/common/uri';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { IEditorSerializer } from 'vs/workbench/common/editor';
import { EditorInput } from 'vs/workbench/common/editor/editorInput';
import { ISerializedTerminalEditorInput, ITerminalEditorService, ITerminalInstance } from 'vs/workbench/contrib/terminal/browser/terminal';
import { TerminalEditorInput } from 'vs/workbench/contrib/terminal/browser/terminalEditorInput';
export class TerminalInputSerializer implements IEditorSerializer {
constructor(
@ITerminalEditorService private readonly _terminalEditorService: ITerminalEditorService
) { }
public canSerialize(editorInput: TerminalEditorInput): boolean {
return !!editorInput.terminalInstance?.persistentProcessId;
}
public serialize(editorInput: TerminalEditorInput): string | undefined {
if (!editorInput.terminalInstance?.persistentProcessId || !editorInput.terminalInstance.shouldPersist) {
return;
}
const term = JSON.stringify(this._toJson(editorInput.terminalInstance));
return term;
}
public deserialize(instantiationService: IInstantiationService, serializedEditorInput: string): EditorInput | undefined { | terminalInstance.resource = URI.parse(terminalInstance.resource);
return this._terminalEditorService.reviveInput(terminalInstance);
}
private _toJson(instance: ITerminalInstance): ISerializedTerminalEditorInput {
return {
id: instance.persistentProcessId!,
pid: instance.processId || 0,
title: instance.title,
titleSource: instance.titleSource,
cwd: '',
icon: instance.icon,
color: instance.color,
resource: instance.resource.toString(),
hasChildProcesses: instance.hasChildProcesses
};
}
} | const terminalInstance = JSON.parse(serializedEditorInput); | random_line_split |
terminalEditorSerializer.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { URI } from 'vs/base/common/uri';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { IEditorSerializer } from 'vs/workbench/common/editor';
import { EditorInput } from 'vs/workbench/common/editor/editorInput';
import { ISerializedTerminalEditorInput, ITerminalEditorService, ITerminalInstance } from 'vs/workbench/contrib/terminal/browser/terminal';
import { TerminalEditorInput } from 'vs/workbench/contrib/terminal/browser/terminalEditorInput';
export class TerminalInputSerializer implements IEditorSerializer {
constructor(
@ITerminalEditorService private readonly _terminalEditorService: ITerminalEditorService
) |
public canSerialize(editorInput: TerminalEditorInput): boolean {
return !!editorInput.terminalInstance?.persistentProcessId;
}
public serialize(editorInput: TerminalEditorInput): string | undefined {
if (!editorInput.terminalInstance?.persistentProcessId || !editorInput.terminalInstance.shouldPersist) {
return;
}
const term = JSON.stringify(this._toJson(editorInput.terminalInstance));
return term;
}
public deserialize(instantiationService: IInstantiationService, serializedEditorInput: string): EditorInput | undefined {
const terminalInstance = JSON.parse(serializedEditorInput);
terminalInstance.resource = URI.parse(terminalInstance.resource);
return this._terminalEditorService.reviveInput(terminalInstance);
}
private _toJson(instance: ITerminalInstance): ISerializedTerminalEditorInput {
return {
id: instance.persistentProcessId!,
pid: instance.processId || 0,
title: instance.title,
titleSource: instance.titleSource,
cwd: '',
icon: instance.icon,
color: instance.color,
resource: instance.resource.toString(),
hasChildProcesses: instance.hasChildProcesses
};
}
}
| { } | identifier_body |
principal.py | from office365.runtime.paths.service_operation import ServiceOperationPath
from office365.sharepoint.base_entity import BaseEntity
class Principal(BaseEntity):
"""Represents a user or group that can be assigned permissions to control security."""
@property
def id(self):
"""Gets a value that specifies the member identifier for the user or group.
:rtype: int or None
"""
return self.properties.get('Id', None)
@property
def title(self):
"""Gets a value that specifies the name of the principal.
:rtype: str or None
"""
return self.properties.get('Title', None)
@title.setter
def title(self, value):
self.set_property('Title', value)
@property
def login_name(self):
"""Gets the login name of the principal.
:rtype: str or None
"""
return self.properties.get('LoginName', None)
@property
def user_principal_name(self):
"""Gets the UPN of the principal.
:rtype: str or None
"""
return self.properties.get('UserPrincipalName', None)
| :rtype: bool or None
"""
return self.properties.get('IsHiddenInUI', None)
@property
def principal_type(self):
"""Gets the login name of the principal.
:rtype: int or None
"""
return self.properties.get('PrincipalType', None)
def set_property(self, name, value, persist_changes=True):
super(Principal, self).set_property(name, value, persist_changes)
# fallback: create a new resource path
if self._resource_path is None:
if name == "Id":
self._resource_path = ServiceOperationPath(
"GetById", [value], self._parent_collection.resource_path)
elif name == "LoginName":
self._resource_path = ServiceOperationPath(
"GetByName", [value], self._parent_collection.resource_path)
return self | @property
def is_hidden_in_ui(self):
"""Gets the login name of the principal.
| random_line_split |
principal.py | from office365.runtime.paths.service_operation import ServiceOperationPath
from office365.sharepoint.base_entity import BaseEntity
class Principal(BaseEntity):
"""Represents a user or group that can be assigned permissions to control security."""
@property
def id(self):
"""Gets a value that specifies the member identifier for the user or group.
:rtype: int or None
"""
return self.properties.get('Id', None)
@property
def title(self):
"""Gets a value that specifies the name of the principal.
:rtype: str or None
"""
return self.properties.get('Title', None)
@title.setter
def title(self, value):
self.set_property('Title', value)
@property
def login_name(self):
"""Gets the login name of the principal.
:rtype: str or None
"""
return self.properties.get('LoginName', None)
@property
def user_principal_name(self):
"""Gets the UPN of the principal.
:rtype: str or None
"""
return self.properties.get('UserPrincipalName', None)
@property
def is_hidden_in_ui(self):
"""Gets the login name of the principal.
:rtype: bool or None
"""
return self.properties.get('IsHiddenInUI', None)
@property
def | (self):
"""Gets the login name of the principal.
:rtype: int or None
"""
return self.properties.get('PrincipalType', None)
def set_property(self, name, value, persist_changes=True):
super(Principal, self).set_property(name, value, persist_changes)
# fallback: create a new resource path
if self._resource_path is None:
if name == "Id":
self._resource_path = ServiceOperationPath(
"GetById", [value], self._parent_collection.resource_path)
elif name == "LoginName":
self._resource_path = ServiceOperationPath(
"GetByName", [value], self._parent_collection.resource_path)
return self
| principal_type | identifier_name |
principal.py | from office365.runtime.paths.service_operation import ServiceOperationPath
from office365.sharepoint.base_entity import BaseEntity
class Principal(BaseEntity):
"""Represents a user or group that can be assigned permissions to control security."""
@property
def id(self):
"""Gets a value that specifies the member identifier for the user or group.
:rtype: int or None
"""
return self.properties.get('Id', None)
@property
def title(self):
"""Gets a value that specifies the name of the principal.
:rtype: str or None
"""
return self.properties.get('Title', None)
@title.setter
def title(self, value):
self.set_property('Title', value)
@property
def login_name(self):
"""Gets the login name of the principal.
:rtype: str or None
"""
return self.properties.get('LoginName', None)
@property
def user_principal_name(self):
"""Gets the UPN of the principal.
:rtype: str or None
"""
return self.properties.get('UserPrincipalName', None)
@property
def is_hidden_in_ui(self):
"""Gets the login name of the principal.
:rtype: bool or None
"""
return self.properties.get('IsHiddenInUI', None)
@property
def principal_type(self):
|
def set_property(self, name, value, persist_changes=True):
super(Principal, self).set_property(name, value, persist_changes)
# fallback: create a new resource path
if self._resource_path is None:
if name == "Id":
self._resource_path = ServiceOperationPath(
"GetById", [value], self._parent_collection.resource_path)
elif name == "LoginName":
self._resource_path = ServiceOperationPath(
"GetByName", [value], self._parent_collection.resource_path)
return self
| """Gets the login name of the principal.
:rtype: int or None
"""
return self.properties.get('PrincipalType', None) | identifier_body |
principal.py | from office365.runtime.paths.service_operation import ServiceOperationPath
from office365.sharepoint.base_entity import BaseEntity
class Principal(BaseEntity):
"""Represents a user or group that can be assigned permissions to control security."""
@property
def id(self):
"""Gets a value that specifies the member identifier for the user or group.
:rtype: int or None
"""
return self.properties.get('Id', None)
@property
def title(self):
"""Gets a value that specifies the name of the principal.
:rtype: str or None
"""
return self.properties.get('Title', None)
@title.setter
def title(self, value):
self.set_property('Title', value)
@property
def login_name(self):
"""Gets the login name of the principal.
:rtype: str or None
"""
return self.properties.get('LoginName', None)
@property
def user_principal_name(self):
"""Gets the UPN of the principal.
:rtype: str or None
"""
return self.properties.get('UserPrincipalName', None)
@property
def is_hidden_in_ui(self):
"""Gets the login name of the principal.
:rtype: bool or None
"""
return self.properties.get('IsHiddenInUI', None)
@property
def principal_type(self):
"""Gets the login name of the principal.
:rtype: int or None
"""
return self.properties.get('PrincipalType', None)
def set_property(self, name, value, persist_changes=True):
super(Principal, self).set_property(name, value, persist_changes)
# fallback: create a new resource path
if self._resource_path is None:
|
return self
| if name == "Id":
self._resource_path = ServiceOperationPath(
"GetById", [value], self._parent_collection.resource_path)
elif name == "LoginName":
self._resource_path = ServiceOperationPath(
"GetByName", [value], self._parent_collection.resource_path) | conditional_block |
lib.rs | //! natural_constants: a collection of constants and helper functions
//!
//! Written by Willi Kappler, Version 0.1 (2017.02.20)
//!
//! Repository: https://github.com/willi-kappler/natural_constants
//!
//! License: MIT
//!
//!
//! # Example:
//!
//! ```
//! extern crate natural_constants; | //!
//! fn main() {
//! let c = speed_of_light_vac;
//! let m0 = 100.0;
//!
//! // Use c in your code:
//! let E = m0 * c * c;
//! }
//! ```
// For clippy
// #![feature(plugin)]
//
// #![plugin(clippy)]
#![allow(non_upper_case_globals)]
#![allow(dead_code)]
pub mod math;
pub mod physics;
pub mod chemistry;
pub mod biology;
pub mod engineering;
pub mod conversion;
pub mod misc;
pub mod geosciences; | //! use natural_constants::physics::*;
//! | random_line_split |
strings.js | define({
"_widgetLabel": "適合性モデラー",
"general": {
"clear": "消去",
"cancel": "キャンセル",
"save": "実行",
"saveAs": "エクスポート"
},
"saveModel": {
"save": "エクスポート",
"title": "タイトル: ",
"summary": "サマリー: ",
"description": "説明: ",
"tags": "タグ: ",
"folder": "フォルダー: ",
"homeFolderPattern": "{username} (ホーム)",
"failed": "エクスポートできません。"
},
"util": {
"colorRamp": {
"1": "極めて低い",
"2": "非常に低い",
"3": "低",
"4": "やや低い",
"5": "中",
"6": "やや高い",
"7": "高",
"8": "非常に高い",
"9": "極めて高い",
"low": "低",
"high": "高",
"tipPattern": "{label} ({value})",
"flipCaption": "反転"
}
},
"wro": {
"caption": "適合性モデラー",
"browsePrompt": "加重オーバーレイ サービス",
"selectLayersCaption": "レイヤーの選択",
"selectLayers": "レイヤー",
"designModelCaption": "モデルの設計",
"designModel": "モデルの設計",
"transparency": "透過表示",
"visible": "表示",
"total": "合計",
"unableToLoad": "モデルを読み込めません。", | "validating": "整合チェックしています...",
"invalidItemCaption": "加重オーバーレイ サービスの警告",
"notAnImageService": "このアイテムはイメージ サービスではありません。",
"notAWroService": "このアイテムは加重オーバーレイ サービスではありません。",
"undefinedUrl": "このアイテムの URL が定義されていません。",
"inaccessible": "サービスにアクセスできません。",
"generalError": "アイテムを開けません。",
"missingFieldPattern": "{field} は必須フィールドです。",
"notAllowRasterFunction": "[allowRasterFunction] を [true] に設定する必要があります。",
"notNearestResampling": "[defaultResamplingMethod] を [最近隣内挿法] に設定する必要があります。",
"notIsWeightedOverlayProp": "主要プロパティの [IsWeightedOverlay] を [true] に設定する必要があります。",
"invalidLink": "URL が無効です。 選択したレイヤーのサイトを開けませんでした。",
"unexpectedError": "予期しない状態が発生しました。",
"rangeMessage": "値は ${0} ~ ${1} の間でなければなりません",
"rangeMessage100": "値は 0 ~ 100 の間でなければなりません",
"maxLayers": "サービスで許可されているレイヤーの最大数は ${0} です。新しいレイヤーを追加するには、レイヤーを削除する必要があります。",
"notFound": "加重オーバーレイ サービスでレイヤー ${0} が見つかりません",
"wroServiceNotDefined": "モデルに加重オーバーレイ サービスが定義されていません。",
"overlayLayerOutputInvalid": "オーバーレイ レイヤー [${0}] の再分類範囲 [${1}] の出力値がない、または無効です",
"overlayLayerInputInvalid": "オーバーレイ レイヤー [${0}] の再分類範囲 [${1}] の入力最小値/最大値がない、または無効です",
"overlayLayerRangesMissing": "オーバーレイ レイヤー [${0}] に再分類範囲がありません",
"overlayLayerWeight": "オーバーレイ レイヤーの加重を合計 100 にする必要があります",
"overlayLayerRequired": "少なくとも 1 つのオーバーレイ レイヤーが必要です",
"overlayLayerNotDefined": "オーバーレイ レイヤーが定義されていません",
"requiresColormap": "このラスター関数にはカラーマップが必要ですが、モデルに有効なカラーマップ定義がありません",
"createModelError": "モデルの作成中にエラーが発生しました",
"invalidModel": "モデルが有効ではありません",
"imageServiceNotDefined": "イメージ サービス レイヤーが定義されていません",
"imageLayerNotDefined": "イメージ サービス レイヤーが定義されていません",
"histogramNotDefined": "加重オーバーレイ ヒストグラム関数が定義されていません。"
},
"colorRampLabel": {
"Green Yellow Red": "緑 黄 赤",
"Red Yellow Green": "赤 黄 緑",
"Yellow to Dark Red": "黄から濃い赤",
"Dark Red to Yellow": "濃い赤から黄",
"Light Gray to Dark Gray": "ライト グレーからダーク グレー",
"Dark Gray to Light Gray": "ダーク グレーからライト グレー",
"Light Brown to Dark Brown": "ライト ブラウンからダーク ブラウン",
"Dark Brown to Light Brown": "ダーク ブラウンからライト ブラウン",
"Full Spectrum - Bright Red to Blue": "フル スペクトル - 明るい赤から青",
"Full Spectrum - Bright Blue to Red": "フル スペクトル - 明るい青から赤",
"Partial Spectrum - Yellow to Blue": "部分スペクトル - 黄から青",
"Partial Spectrum - Blue to Yellow": "部分スペクトル - 青から黄",
"Yellow-Green to Dark Blue": "黄緑から濃い青",
"Dark Blue to Yellow-Green": "濃い青から黄緑",
"Cold to Hot Diverging": "寒色から暖色に分散",
"Hot to Cold Diverging": "暖色から寒色に分散",
"Surface - Low to High": "サーフェス - 低から高",
"Surface - High to Low": "サーフェス - 高から低"
}
},
"tabs": {
"layers": "レイヤー",
"model": "モデル",
"chart": "チャート"
},
"chart": {
"prompt": "機能",
"working": "更新しています...",
"polygonTool": "ポリゴンを描画",
"freehandPolygonTool": "フリーハンド ポリゴンの描画",
"selectTool": "レイヤーから選択",
"panTool": "Pan",
"clearButton": "解除",
"noModelLayer": "モデルなし",
"noSubjectLayers": "マップにポリゴン レイヤーがありません。",
"tipPattern": "${category} - ${label}: ${percent}%",
"tipPattern2": "${category}: ${percent}%",
"labelPattern": "${category} - ${label}"
}
}); | "projectNotOpen": "プロジェクトが開いていません。",
"readMore": "詳細",
"validation": { | random_line_split |
vehicle-features.service.ts | import * as mongodb from 'mongodb';
import Constants from '../../constants';
import { MongoIdMapperService } from '../mongo-id-mapper.service';
import { VehicleFeature } from './vehicle-feature.interface';
export class VehicleFeatureService {
constructor(private mongoIdMapperService: MongoIdMapperService) { }
public async create(feature: VehicleFeature): Promise<string> {
const db = await this.getConnection();
const collection = db.collection(Constants.data.vehicleFeatureCollection);
const existing = await collection.findOne({ name: feature.name } as VehicleFeature);
if (existing) {
throw new Error('Cannot insert duplicate feature.');
}
const inserted = await collection.insertOne(feature);
return inserted.insertedId.toString();
}
public async get(): Promise<VehicleFeature[]> {
const db = await this.getConnection();
const collection = db.collection(Constants.data.vehicleFeatureCollection);
return await collection.find().map((feature) => this.mongoIdMapperService.map(feature) as VehicleFeature).toArray();
}
public async getById(id: string): Promise<VehicleFeature> {
const db = await this.getConnection();
const collection = db.collection(Constants.data.vehicleFeatureCollection);
return this.mongoIdMapperService.map(await collection.findOne({ _id: new mongodb.ObjectID(id) })) as VehicleFeature;
}
private async getConnection(): Promise<mongodb.Db> |
}
| {
return await mongodb.MongoClient.connect(Constants.data.mongoUrl);
} | identifier_body |
vehicle-features.service.ts | import * as mongodb from 'mongodb';
import Constants from '../../constants';
import { MongoIdMapperService } from '../mongo-id-mapper.service';
import { VehicleFeature } from './vehicle-feature.interface';
export class VehicleFeatureService {
constructor(private mongoIdMapperService: MongoIdMapperService) { }
public async create(feature: VehicleFeature): Promise<string> {
const db = await this.getConnection();
const collection = db.collection(Constants.data.vehicleFeatureCollection);
const existing = await collection.findOne({ name: feature.name } as VehicleFeature);
if (existing) |
const inserted = await collection.insertOne(feature);
return inserted.insertedId.toString();
}
public async get(): Promise<VehicleFeature[]> {
const db = await this.getConnection();
const collection = db.collection(Constants.data.vehicleFeatureCollection);
return await collection.find().map((feature) => this.mongoIdMapperService.map(feature) as VehicleFeature).toArray();
}
public async getById(id: string): Promise<VehicleFeature> {
const db = await this.getConnection();
const collection = db.collection(Constants.data.vehicleFeatureCollection);
return this.mongoIdMapperService.map(await collection.findOne({ _id: new mongodb.ObjectID(id) })) as VehicleFeature;
}
private async getConnection(): Promise<mongodb.Db> {
return await mongodb.MongoClient.connect(Constants.data.mongoUrl);
}
}
| {
throw new Error('Cannot insert duplicate feature.');
} | conditional_block |
vehicle-features.service.ts | import * as mongodb from 'mongodb';
import Constants from '../../constants';
import { MongoIdMapperService } from '../mongo-id-mapper.service';
import { VehicleFeature } from './vehicle-feature.interface';
export class VehicleFeatureService {
constructor(private mongoIdMapperService: MongoIdMapperService) { }
public async create(feature: VehicleFeature): Promise<string> {
const db = await this.getConnection();
const collection = db.collection(Constants.data.vehicleFeatureCollection);
const existing = await collection.findOne({ name: feature.name } as VehicleFeature);
if (existing) {
throw new Error('Cannot insert duplicate feature.');
} |
return inserted.insertedId.toString();
}
public async get(): Promise<VehicleFeature[]> {
const db = await this.getConnection();
const collection = db.collection(Constants.data.vehicleFeatureCollection);
return await collection.find().map((feature) => this.mongoIdMapperService.map(feature) as VehicleFeature).toArray();
}
public async getById(id: string): Promise<VehicleFeature> {
const db = await this.getConnection();
const collection = db.collection(Constants.data.vehicleFeatureCollection);
return this.mongoIdMapperService.map(await collection.findOne({ _id: new mongodb.ObjectID(id) })) as VehicleFeature;
}
private async getConnection(): Promise<mongodb.Db> {
return await mongodb.MongoClient.connect(Constants.data.mongoUrl);
}
} |
const inserted = await collection.insertOne(feature); | random_line_split |
vehicle-features.service.ts | import * as mongodb from 'mongodb';
import Constants from '../../constants';
import { MongoIdMapperService } from '../mongo-id-mapper.service';
import { VehicleFeature } from './vehicle-feature.interface';
export class VehicleFeatureService {
constructor(private mongoIdMapperService: MongoIdMapperService) { }
public async create(feature: VehicleFeature): Promise<string> {
const db = await this.getConnection();
const collection = db.collection(Constants.data.vehicleFeatureCollection);
const existing = await collection.findOne({ name: feature.name } as VehicleFeature);
if (existing) {
throw new Error('Cannot insert duplicate feature.');
}
const inserted = await collection.insertOne(feature);
return inserted.insertedId.toString();
}
public async get(): Promise<VehicleFeature[]> {
const db = await this.getConnection();
const collection = db.collection(Constants.data.vehicleFeatureCollection);
return await collection.find().map((feature) => this.mongoIdMapperService.map(feature) as VehicleFeature).toArray();
}
public async getById(id: string): Promise<VehicleFeature> {
const db = await this.getConnection();
const collection = db.collection(Constants.data.vehicleFeatureCollection);
return this.mongoIdMapperService.map(await collection.findOne({ _id: new mongodb.ObjectID(id) })) as VehicleFeature;
}
private async | (): Promise<mongodb.Db> {
return await mongodb.MongoClient.connect(Constants.data.mongoUrl);
}
}
| getConnection | identifier_name |
Header.tsx | from '../lib/media';
import { useMediaQuery } from '../lib/media';
import { SearchField } from './primitives/SearchField';
import { Highlight } from './primitives/Highlight';
import { Wrapper } from './primitives/Wrapper';
import { Hamburger } from './icons/Hamburger';
import { Button } from './primitives/Button';
import { NavItem } from './docs/Navigation';
import { DarkModeBtn } from './DarkModeBtn';
import { Keystone } from './icons/Keystone';
import { MobileMenu } from './MobileMenu';
import { GitHub } from './icons/GitHub';
// TODO: Add in search for mobile via this button
// import { Search } from './icons/Search';
type HeaderContextType = { mobileNavIsOpen: boolean; desktopOpenState: number };
const HeaderContext = createContext<HeaderContextType>({
mobileNavIsOpen: false,
desktopOpenState: -1,
});
export const useHeaderContext = () => useContext(HeaderContext);
function Logo() {
const mq = useMediaQuery();
return (
<div
css={mq({
marginRight: [0, null, null, null, '1rem'],
marginTop: '0.1rem',
whiteSpace: 'nowrap',
})}
>
<Link href="/" passHref>
<a
css={{
fontSize: 'var(--font-medium)',
fontWeight: 600,
verticalAlign: 'middle',
transition: 'color 0.3s ease',
}}
>
<Keystone
grad="logo"
css={{
display: 'inline-block',
width: '2rem',
height: '2rem',
margin: '0 var(--space-medium) var(--space-xsmall) 0',
verticalAlign: 'middle',
}}
/>
<Highlight>Keystone 6</Highlight>
</a>
</Link>
</div>
);
}
function useCurrentSection() {
const { pathname } = useRouter();
const check = (candidate: string) => pathname.startsWith(candidate);
if (['/updates', '/releases'].some(check)) return '/updates';
if (['/why-keystone', '/for-'].some(check)) return '/why-keystone';
if (['/docs'].some(check)) return '/docs';
}
function LinkItem({ children, href }: { children: ReactNode; href: string }) {
const mq = useMediaQuery();
const currentSection = useCurrentSection();
const isActive = href === currentSection;
return (
<span css={mq({ display: ['none', 'inline'], fontWeight: 600 })}>
<NavItem
isActive={isActive}
alwaysVisible
href={href}
css={{
padding: '0 !important',
}}
>
{children}
</NavItem>
</span>
);
}
export function Header() {
const mq = useMediaQuery();
const router = useRouter();
const menuRef = useRef<HTMLDivElement>(null);
const headerRef = useRef<HTMLElement>(null);
const [mobileNavIsOpen, setMobileNavIsOpen] = useState(false);
const [desktopOpenState, setDesktopOpenState] = useState(-1);
useEffect(() => {
const listener = () => {
setMobileNavIsOpen(false);
setDesktopOpenState(-1);
const width = Math.max(
document.body.scrollWidth,
document.documentElement.scrollWidth,
document.body.offsetWidth,
document.documentElement.offsetWidth,
document.documentElement.clientWidth
);
if (width > BREAK_POINTS.sm) {
setDesktopOpenState(-1);
} else |
};
window.addEventListener('resize', debounce(listener, 130));
return () => {
window.removeEventListener('resize', debounce(listener, 130));
};
}, [setDesktopOpenState]);
useEffect(() => {
document.body.style.overflow = 'auto';
// search - init field
let searchAttempt = 0;
// @ts-ignore
document.getElementById('search-field').disabled = true;
const loadSearch = (searchAttempt: number) => {
// @ts-ignore
if (window.docsearch && searchAttempt < 10) {
// @ts-ignore
document.getElementById('search-field').disabled = false;
// @ts-ignore
window.docsearch({
apiKey: '211e94c001e6b4c6744ae72fb252eaba',
indexName: 'keystonejs',
inputSelector: '#search-field',
algoliaOptions: {
facetFilters: ['tags:stable'],
},
transformData: (results: any) => {
if (window.location.hostname == 'keystonejs.com') return results;
return results.map((result: object) => {
// @ts-ignore
result.url = result.url.replace('https://keystonejs.com', window.location.origin);
return result;
});
},
});
} else if (searchAttempt >= 10) {
// @ts-ignore
document.getElementById('search-field-container').style.visibility = 'hidden';
} else {
setTimeout(() => loadSearch(searchAttempt++), 500);
}
};
loadSearch(searchAttempt);
// search - keyboard shortcut
let keysPressed = {};
document.body.addEventListener('keydown', event => {
// @ts-ignore
keysPressed[event.key] = true;
// @ts-ignore
if (keysPressed['Meta'] && event.key == 'k') {
event.preventDefault();
document.getElementById('search-field')?.focus();
}
});
document.body.addEventListener('keyup', event => {
// @ts-ignore
delete keysPressed[event.key];
});
}, []);
const handleOpen = useCallback(() => {
setMobileNavIsOpen(true);
document.body.style.overflow = 'hidden';
document.getElementById('mobile-menu-close-btn')?.focus();
}, []);
const handleClose = useCallback(() => {
setMobileNavIsOpen(false);
document.body.style.overflow = 'auto';
document.getElementById('skip-link-navigation-btn')?.focus();
}, []);
useEffect(() => {
router.events.on('routeChangeComplete', handleClose);
return () => {
router.events.off('routeChangeComplete', handleClose);
};
}, [router.events, handleClose]);
return (
<header ref={headerRef}>
<Wrapper
css={mq({
display: 'grid',
gridTemplateColumns: [
'auto max-content max-content max-content',
'auto max-content max-content max-content max-content max-content max-content',
'max-content auto max-content max-content max-content max-content max-content',
'max-content auto max-content max-content max-content max-content max-content max-content',
'15rem auto max-content max-content max-content max-content max-content max-content',
],
gap: [
'var(--space-medium)',
'var(--space-large)',
'var(--space-medium)',
'var(--space-large)',
'var(--space-xlarge)',
],
justifyItems: 'start',
alignItems: 'center',
paddingTop: 'var(--space-xlarge)',
paddingBottom: 'var(--space-xlarge)',
color: 'var(--muted)',
'& a:hover': {
color: 'var(--link)',
},
marginBottom: '2rem',
})}
>
<Logo />
<div
id="search-field-container"
css={mq({
display: ['none', null, 'block'],
width: ['100%', null, null, null, '80%'],
})}
>
<SearchField />
</div>
<span
css={mq({
display: ['none', null, 'inline-block'],
})}
>
<LinkItem href="/why-keystone">Why Keystone</LinkItem>
</span>
<span
css={mq({
display: ['none', null, 'inline-block'],
})}
>
<LinkItem href="/updates">Updates</LinkItem>
</span>
{/* TODO: Add in search for mobile via this button */}
{/*
<button
css={mq({
display: ['inline-block', 'inline-block', 'none'],
appearance: 'none',
border: '0 none',
boxShadow: 'none',
background: 'transparent',
padding: '0.25rem',
cursor: 'pointer',
color: 'var(--muted)',
})}
>
<Search css={{ height: '1.4rem', marginTop: '0.2rem' }} />
</button>
*/}
<Button
as="a"
href="/docs"
shadow
css={mq({
'&&': {
display: ['none', null, 'inline-flex'],
},
})}
>
Documentation
</Button>
<DarkModeBtn />
<a
href="https://github.com/keystonejs/keystone"
target="_blank"
rel="noopener noreferrer"
css={mq({
display: ['none', null, 'inline-flex'],
padding: 0,
justifyContent: 'center',
borderRadius: '100%',
color: 'currentColor',
transition: 'color 0.3s ease',
':hover': {
color: '#000',
},
})}
>
<GitHub css={{ height: '1.5em' }} />
| {
setDesktopOpenState(-1);
} | conditional_block |
Header.tsx | } from '../lib/media';
import { useMediaQuery } from '../lib/media';
import { SearchField } from './primitives/SearchField';
import { Highlight } from './primitives/Highlight';
import { Wrapper } from './primitives/Wrapper';
import { Hamburger } from './icons/Hamburger';
import { Button } from './primitives/Button';
import { NavItem } from './docs/Navigation';
import { DarkModeBtn } from './DarkModeBtn';
import { Keystone } from './icons/Keystone';
import { MobileMenu } from './MobileMenu';
import { GitHub } from './icons/GitHub';
// TODO: Add in search for mobile via this button
// import { Search } from './icons/Search';
type HeaderContextType = { mobileNavIsOpen: boolean; desktopOpenState: number };
const HeaderContext = createContext<HeaderContextType>({
mobileNavIsOpen: false,
desktopOpenState: -1,
});
export const useHeaderContext = () => useContext(HeaderContext);
function Logo() {
const mq = useMediaQuery();
return (
<div
css={mq({
marginRight: [0, null, null, null, '1rem'],
marginTop: '0.1rem',
whiteSpace: 'nowrap',
})}
>
<Link href="/" passHref>
<a
css={{
fontSize: 'var(--font-medium)',
fontWeight: 600,
verticalAlign: 'middle',
transition: 'color 0.3s ease',
}}
>
<Keystone
grad="logo"
css={{
display: 'inline-block',
width: '2rem',
height: '2rem',
margin: '0 var(--space-medium) var(--space-xsmall) 0',
verticalAlign: 'middle',
}}
/>
<Highlight>Keystone 6</Highlight>
</a>
</Link>
</div>
);
}
function useCurrentSection() {
const { pathname } = useRouter();
const check = (candidate: string) => pathname.startsWith(candidate);
if (['/updates', '/releases'].some(check)) return '/updates';
if (['/why-keystone', '/for-'].some(check)) return '/why-keystone';
if (['/docs'].some(check)) return '/docs';
}
function LinkItem({ children, href }: { children: ReactNode; href: string }) {
const mq = useMediaQuery();
const currentSection = useCurrentSection();
const isActive = href === currentSection;
return (
<span css={mq({ display: ['none', 'inline'], fontWeight: 600 })}>
<NavItem
isActive={isActive}
alwaysVisible
href={href}
css={{
padding: '0 !important',
}}
>
{children}
</NavItem>
</span>
);
}
export function Header() {
const mq = useMediaQuery();
const router = useRouter();
const menuRef = useRef<HTMLDivElement>(null);
const headerRef = useRef<HTMLElement>(null);
const [mobileNavIsOpen, setMobileNavIsOpen] = useState(false);
const [desktopOpenState, setDesktopOpenState] = useState(-1);
useEffect(() => {
const listener = () => {
setMobileNavIsOpen(false);
setDesktopOpenState(-1);
const width = Math.max(
document.body.scrollWidth,
document.documentElement.scrollWidth,
document.body.offsetWidth,
document.documentElement.offsetWidth,
document.documentElement.clientWidth
);
if (width > BREAK_POINTS.sm) {
setDesktopOpenState(-1);
} else {
setDesktopOpenState(-1);
}
};
window.addEventListener('resize', debounce(listener, 130));
return () => {
window.removeEventListener('resize', debounce(listener, 130));
};
}, [setDesktopOpenState]);
useEffect(() => {
document.body.style.overflow = 'auto';
// search - init field
let searchAttempt = 0;
// @ts-ignore
document.getElementById('search-field').disabled = true;
const loadSearch = (searchAttempt: number) => {
// @ts-ignore
if (window.docsearch && searchAttempt < 10) {
// @ts-ignore
document.getElementById('search-field').disabled = false;
// @ts-ignore
window.docsearch({
apiKey: '211e94c001e6b4c6744ae72fb252eaba',
indexName: 'keystonejs',
inputSelector: '#search-field',
algoliaOptions: {
facetFilters: ['tags:stable'],
},
transformData: (results: any) => {
if (window.location.hostname == 'keystonejs.com') return results;
return results.map((result: object) => {
// @ts-ignore
result.url = result.url.replace('https://keystonejs.com', window.location.origin);
return result;
});
},
});
} else if (searchAttempt >= 10) {
// @ts-ignore
document.getElementById('search-field-container').style.visibility = 'hidden';
} else {
setTimeout(() => loadSearch(searchAttempt++), 500);
}
};
loadSearch(searchAttempt);
// search - keyboard shortcut
let keysPressed = {};
document.body.addEventListener('keydown', event => {
// @ts-ignore
keysPressed[event.key] = true;
// @ts-ignore
if (keysPressed['Meta'] && event.key == 'k') {
event.preventDefault();
document.getElementById('search-field')?.focus();
}
});
document.body.addEventListener('keyup', event => {
// @ts-ignore
delete keysPressed[event.key];
});
}, []);
const handleOpen = useCallback(() => {
setMobileNavIsOpen(true);
document.body.style.overflow = 'hidden';
document.getElementById('mobile-menu-close-btn')?.focus();
}, []);
const handleClose = useCallback(() => {
setMobileNavIsOpen(false);
document.body.style.overflow = 'auto';
document.getElementById('skip-link-navigation-btn')?.focus();
}, []);
useEffect(() => {
router.events.on('routeChangeComplete', handleClose);
return () => {
router.events.off('routeChangeComplete', handleClose);
};
}, [router.events, handleClose]);
return (
<header ref={headerRef}>
<Wrapper
css={mq({
display: 'grid',
gridTemplateColumns: [
'auto max-content max-content max-content',
'auto max-content max-content max-content max-content max-content max-content',
'max-content auto max-content max-content max-content max-content max-content',
'max-content auto max-content max-content max-content max-content max-content max-content',
'15rem auto max-content max-content max-content max-content max-content max-content',
],
gap: [
'var(--space-medium)',
'var(--space-large)',
'var(--space-medium)',
'var(--space-large)',
'var(--space-xlarge)',
],
justifyItems: 'start',
alignItems: 'center',
paddingTop: 'var(--space-xlarge)',
paddingBottom: 'var(--space-xlarge)',
color: 'var(--muted)',
'& a:hover': {
color: 'var(--link)',
},
marginBottom: '2rem',
})}
>
<Logo />
<div
id="search-field-container"
css={mq({
display: ['none', null, 'block'],
width: ['100%', null, null, null, '80%'],
})}
>
<SearchField />
</div>
<span
css={mq({
display: ['none', null, 'inline-block'],
})}
>
<LinkItem href="/why-keystone">Why Keystone</LinkItem>
</span>
<span
css={mq({
display: ['none', null, 'inline-block'],
})}
>
<LinkItem href="/updates">Updates</LinkItem>
</span>
{/* TODO: Add in search for mobile via this button */}
{/*
<button
css={mq({ | padding: '0.25rem',
cursor: 'pointer',
color: 'var(--muted)',
})}
>
<Search css={{ height: '1.4rem', marginTop: '0.2rem' }} />
</button>
*/}
<Button
as="a"
href="/docs"
shadow
css={mq({
'&&': {
display: ['none', null, 'inline-flex'],
},
})}
>
Documentation
</Button>
<DarkModeBtn />
<a
href="https://github.com/keystonejs/keystone"
target="_blank"
rel="noopener noreferrer"
css={mq({
display: ['none', null, 'inline-flex'],
padding: 0,
justifyContent: 'center',
borderRadius: '100%',
color: 'currentColor',
transition: 'color 0.3s ease',
':hover': {
color: '#000',
},
})}
>
<GitHub css={{ height: '1.5em' }} />
| display: ['inline-block', 'inline-block', 'none'],
appearance: 'none',
border: '0 none',
boxShadow: 'none',
background: 'transparent', | random_line_split |
Header.tsx | } from '../lib/media';
import { useMediaQuery } from '../lib/media';
import { SearchField } from './primitives/SearchField';
import { Highlight } from './primitives/Highlight';
import { Wrapper } from './primitives/Wrapper';
import { Hamburger } from './icons/Hamburger';
import { Button } from './primitives/Button';
import { NavItem } from './docs/Navigation';
import { DarkModeBtn } from './DarkModeBtn';
import { Keystone } from './icons/Keystone';
import { MobileMenu } from './MobileMenu';
import { GitHub } from './icons/GitHub';
// TODO: Add in search for mobile via this button
// import { Search } from './icons/Search';
type HeaderContextType = { mobileNavIsOpen: boolean; desktopOpenState: number };
const HeaderContext = createContext<HeaderContextType>({
mobileNavIsOpen: false,
desktopOpenState: -1,
});
export const useHeaderContext = () => useContext(HeaderContext);
function | () {
const mq = useMediaQuery();
return (
<div
css={mq({
marginRight: [0, null, null, null, '1rem'],
marginTop: '0.1rem',
whiteSpace: 'nowrap',
})}
>
<Link href="/" passHref>
<a
css={{
fontSize: 'var(--font-medium)',
fontWeight: 600,
verticalAlign: 'middle',
transition: 'color 0.3s ease',
}}
>
<Keystone
grad="logo"
css={{
display: 'inline-block',
width: '2rem',
height: '2rem',
margin: '0 var(--space-medium) var(--space-xsmall) 0',
verticalAlign: 'middle',
}}
/>
<Highlight>Keystone 6</Highlight>
</a>
</Link>
</div>
);
}
function useCurrentSection() {
const { pathname } = useRouter();
const check = (candidate: string) => pathname.startsWith(candidate);
if (['/updates', '/releases'].some(check)) return '/updates';
if (['/why-keystone', '/for-'].some(check)) return '/why-keystone';
if (['/docs'].some(check)) return '/docs';
}
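// Illustrative mapping for the prefix checks above (assumed example paths):
// '/docs/guides/relationships' -> '/docs'
// '/releases/2021-11-02'       -> '/updates'
// '/for-developers'            -> '/why-keystone'
// any other pathname           -> undefined (no nav item highlighted)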
function LinkItem({ children, href }: { children: ReactNode; href: string }) {
const mq = useMediaQuery();
const currentSection = useCurrentSection();
const isActive = href === currentSection;
return (
<span css={mq({ display: ['none', 'inline'], fontWeight: 600 })}>
<NavItem
isActive={isActive}
alwaysVisible
href={href}
css={{
padding: '0 !important',
}}
>
{children}
</NavItem>
</span>
);
}
export function Header() {
const mq = useMediaQuery();
const router = useRouter();
const menuRef = useRef<HTMLDivElement>(null);
const headerRef = useRef<HTMLElement>(null);
const [mobileNavIsOpen, setMobileNavIsOpen] = useState(false);
const [desktopOpenState, setDesktopOpenState] = useState(-1);
useEffect(() => {
const listener = () => {
// Collapse any open navigation when the viewport is resized.
setMobileNavIsOpen(false);
setDesktopOpenState(-1);
};
// A single debounced instance lets the cleanup remove the same function
// reference that was added.
const debouncedListener = debounce(listener, 130);
window.addEventListener('resize', debouncedListener);
return () => {
window.removeEventListener('resize', debouncedListener);
};
}, [setDesktopOpenState]);
useEffect(() => {
document.body.style.overflow = 'auto';
// search - init field
let searchAttempt = 0;
// @ts-ignore
document.getElementById('search-field').disabled = true;
const loadSearch = (searchAttempt: number) => {
// @ts-ignore
if (window.docsearch && searchAttempt < 10) {
// @ts-ignore
document.getElementById('search-field').disabled = false;
// @ts-ignore
window.docsearch({
apiKey: '211e94c001e6b4c6744ae72fb252eaba',
indexName: 'keystonejs',
inputSelector: '#search-field',
algoliaOptions: {
facetFilters: ['tags:stable'],
},
transformData: (results: any) => {
if (window.location.hostname == 'keystonejs.com') return results;
return results.map((result: object) => {
// @ts-ignore
result.url = result.url.replace('https://keystonejs.com', window.location.origin);
return result;
});
},
});
} else if (searchAttempt >= 10) {
// @ts-ignore
document.getElementById('search-field-container').style.visibility = 'hidden';
} else {
// retry with the next attempt number so polling stops after ten tries
setTimeout(() => loadSearch(searchAttempt + 1), 500);
}
};
loadSearch(searchAttempt);
// search - keyboard shortcut
const keysPressed: Record<string, boolean> = {};
document.body.addEventListener('keydown', event => {
keysPressed[event.key] = true;
if (keysPressed['Meta'] && event.key === 'k') {
event.preventDefault();
document.getElementById('search-field')?.focus();
}
});
document.body.addEventListener('keyup', event => {
delete keysPressed[event.key];
});
}, []);
const handleOpen = useCallback(() => {
setMobileNavIsOpen(true);
document.body.style.overflow = 'hidden';
document.getElementById('mobile-menu-close-btn')?.focus();
}, []);
const handleClose = useCallback(() => {
setMobileNavIsOpen(false);
document.body.style.overflow = 'auto';
document.getElementById('skip-link-navigation-btn')?.focus();
}, []);
useEffect(() => {
router.events.on('routeChangeComplete', handleClose);
return () => {
router.events.off('routeChangeComplete', handleClose);
};
}, [router.events, handleClose]);
return (
<header ref={headerRef}>
<Wrapper
css={mq({
display: 'grid',
gridTemplateColumns: [
'auto max-content max-content max-content',
'auto max-content max-content max-content max-content max-content max-content',
'max-content auto max-content max-content max-content max-content max-content',
'max-content auto max-content max-content max-content max-content max-content max-content',
'15rem auto max-content max-content max-content max-content max-content max-content',
],
gap: [
'var(--space-medium)',
'var(--space-large)',
'var(--space-medium)',
'var(--space-large)',
'var(--space-xlarge)',
],
justifyItems: 'start',
alignItems: 'center',
paddingTop: 'var(--space-xlarge)',
paddingBottom: 'var(--space-xlarge)',
color: 'var(--muted)',
'& a:hover': {
color: 'var(--link)',
},
marginBottom: '2rem',
})}
>
<Logo />
<div
id="search-field-container"
css={mq({
display: ['none', null, 'block'],
width: ['100%', null, null, null, '80%'],
})}
>
<SearchField />
</div>
<span
css={mq({
display: ['none', null, 'inline-block'],
})}
>
<LinkItem href="/why-keystone">Why Keystone</LinkItem>
</span>
<span
css={mq({
display: ['none', null, 'inline-block'],
})}
>
<LinkItem href="/updates">Updates</LinkItem>
</span>
{/* TODO: Add in search for mobile via this button */}
{/*
<button
css={mq({
display: ['inline-block', 'inline-block', 'none'],
appearance: 'none',
border: '0 none',
boxShadow: 'none',
background: 'transparent',
padding: '0.25rem',
cursor: 'pointer',
color: 'var(--muted)',
})}
>
<Search css={{ height: '1.4rem', marginTop: '0.2rem' }} />
</button>
*/}
<Button
as="a"
href="/docs"
shadow
css={mq({
'&&': {
display: ['none', null, 'inline-flex'],
},
})}
>
Documentation
</Button>
<DarkModeBtn />
<a
href="https://github.com/keystonejs/keystone"
target="_blank"
rel="noopener noreferrer"
css={mq({
display: ['none', null, 'inline-flex'],
padding: 0,
justifyContent: 'center',
borderRadius: '100%',
color: 'currentColor',
transition: 'color 0.3s ease',
':hover': {
color: '#000',
},
})}
>
<GitHub css={{ height: '1.5em' }} />
| Logo | identifier_name |
Header.tsx | from '../lib/media';
import { useMediaQuery } from '../lib/media';
import { SearchField } from './primitives/SearchField';
import { Highlight } from './primitives/Highlight';
import { Wrapper } from './primitives/Wrapper';
import { Hamburger } from './icons/Hamburger';
import { Button } from './primitives/Button';
import { NavItem } from './docs/Navigation';
import { DarkModeBtn } from './DarkModeBtn';
import { Keystone } from './icons/Keystone';
import { MobileMenu } from './MobileMenu';
import { GitHub } from './icons/GitHub';
// TODO: Add in search for mobile via this button
// import { Search } from './icons/Search';
type HeaderContextType = { mobileNavIsOpen: boolean; desktopOpenState: number };
const HeaderContext = createContext<HeaderContextType>({
mobileNavIsOpen: false,
desktopOpenState: -1,
});
export const useHeaderContext = () => useContext(HeaderContext);
function Logo() {
const mq = useMediaQuery();
return (
<div
css={mq({
marginRight: [0, null, null, null, '1rem'],
marginTop: '0.1rem',
whiteSpace: 'nowrap',
})}
>
<Link href="/" passHref>
<a
css={{
fontSize: 'var(--font-medium)',
fontWeight: 600,
verticalAlign: 'middle',
transition: 'color 0.3s ease',
}}
>
<Keystone
grad="logo"
css={{
display: 'inline-block',
width: '2rem',
height: '2rem',
margin: '0 var(--space-medium) var(--space-xsmall) 0',
verticalAlign: 'middle',
}}
/>
<Highlight>Keystone 6</Highlight>
</a>
</Link>
</div>
);
}
function useCurrentSection() |
function LinkItem({ children, href }: { children: ReactNode; href: string }) {
const mq = useMediaQuery();
const currentSection = useCurrentSection();
const isActive = href === currentSection;
return (
<span css={mq({ display: ['none', 'inline'], fontWeight: 600 })}>
<NavItem
isActive={isActive}
alwaysVisible
href={href}
css={{
padding: '0 !important',
}}
>
{children}
</NavItem>
</span>
);
}
export function Header() {
const mq = useMediaQuery();
const router = useRouter();
const menuRef = useRef<HTMLDivElement>(null);
const headerRef = useRef<HTMLElement>(null);
const [mobileNavIsOpen, setMobileNavIsOpen] = useState(false);
const [desktopOpenState, setDesktopOpenState] = useState(-1);
useEffect(() => {
const listener = () => {
// Collapse any open navigation when the viewport is resized.
setMobileNavIsOpen(false);
setDesktopOpenState(-1);
};
// A single debounced instance lets the cleanup remove the same function
// reference that was added.
const debouncedListener = debounce(listener, 130);
window.addEventListener('resize', debouncedListener);
return () => {
window.removeEventListener('resize', debouncedListener);
};
}, [setDesktopOpenState]);
useEffect(() => {
document.body.style.overflow = 'auto';
// search - init field
let searchAttempt = 0;
// @ts-ignore
document.getElementById('search-field').disabled = true;
const loadSearch = (searchAttempt: number) => {
// @ts-ignore
if (window.docsearch && searchAttempt < 10) {
// @ts-ignore
document.getElementById('search-field').disabled = false;
// @ts-ignore
window.docsearch({
apiKey: '211e94c001e6b4c6744ae72fb252eaba',
indexName: 'keystonejs',
inputSelector: '#search-field',
algoliaOptions: {
facetFilters: ['tags:stable'],
},
transformData: (results: any) => {
if (window.location.hostname == 'keystonejs.com') return results;
return results.map((result: object) => {
// @ts-ignore
result.url = result.url.replace('https://keystonejs.com', window.location.origin);
return result;
});
},
});
} else if (searchAttempt >= 10) {
// @ts-ignore
document.getElementById('search-field-container').style.visibility = 'hidden';
} else {
// retry with the next attempt number so polling stops after ten tries
setTimeout(() => loadSearch(searchAttempt + 1), 500);
}
};
loadSearch(searchAttempt);
// search - keyboard shortcut
const keysPressed: Record<string, boolean> = {};
document.body.addEventListener('keydown', event => {
keysPressed[event.key] = true;
if (keysPressed['Meta'] && event.key === 'k') {
event.preventDefault();
document.getElementById('search-field')?.focus();
}
});
document.body.addEventListener('keyup', event => {
delete keysPressed[event.key];
});
}, []);
const handleOpen = useCallback(() => {
setMobileNavIsOpen(true);
document.body.style.overflow = 'hidden';
document.getElementById('mobile-menu-close-btn')?.focus();
}, []);
const handleClose = useCallback(() => {
setMobileNavIsOpen(false);
document.body.style.overflow = 'auto';
document.getElementById('skip-link-navigation-btn')?.focus();
}, []);
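// The open/close pair above couples body scroll locking with focus
// management. A compact sketch of the same contract (assumed helper, not
// part of this file):
// function setMobileNav(open: boolean) {
//   document.body.style.overflow = open ? 'hidden' : 'auto';
//   document
//     .getElementById(open ? 'mobile-menu-close-btn' : 'skip-link-navigation-btn')
//     ?.focus();
// }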
useEffect(() => {
router.events.on('routeChangeComplete', handleClose);
return () => {
router.events.off('routeChangeComplete', handleClose);
};
}, [router.events, handleClose]);
return (
<header ref={headerRef}>
<Wrapper
css={mq({
display: 'grid',
gridTemplateColumns: [
'auto max-content max-content max-content',
'auto max-content max-content max-content max-content max-content max-content',
'max-content auto max-content max-content max-content max-content max-content',
'max-content auto max-content max-content max-content max-content max-content max-content',
'15rem auto max-content max-content max-content max-content max-content max-content',
],
gap: [
'var(--space-medium)',
'var(--space-large)',
'var(--space-medium)',
'var(--space-large)',
'var(--space-xlarge)',
],
justifyItems: 'start',
alignItems: 'center',
paddingTop: 'var(--space-xlarge)',
paddingBottom: 'var(--space-xlarge)',
color: 'var(--muted)',
'& a:hover': {
color: 'var(--link)',
},
marginBottom: '2rem',
})}
>
<Logo />
<div
id="search-field-container"
css={mq({
display: ['none', null, 'block'],
width: ['100%', null, null, null, '80%'],
})}
>
<SearchField />
</div>
<span
css={mq({
display: ['none', null, 'inline-block'],
})}
>
<LinkItem href="/why-keystone">Why Keystone</LinkItem>
</span>
<span
css={mq({
display: ['none', null, 'inline-block'],
})}
>
<LinkItem href="/updates">Updates</LinkItem>
</span>
{/* TODO: Add in search for mobile via this button */}
{/*
<button
css={mq({
display: ['inline-block', 'inline-block', 'none'],
appearance: 'none',
border: '0 none',
boxShadow: 'none',
background: 'transparent',
padding: '0.25rem',
cursor: 'pointer',
color: 'var(--muted)',
})}
>
<Search css={{ height: '1.4rem', marginTop: '0.2rem' }} />
</button>
*/}
<Button
as="a"
href="/docs"
shadow
css={mq({
'&&': {
display: ['none', null, 'inline-flex'],
},
})}
>
Documentation
</Button>
<DarkModeBtn />
<a
href="https://github.com/keystonejs/keystone"
target="_blank"
rel="noopener noreferrer"
css={mq({
display: ['none', null, 'inline-flex'],
padding: 0,
justifyContent: 'center',
borderRadius: '100%',
color: 'currentColor',
transition: 'color 0.3s ease',
':hover': {
color: '#000',
},
})}
>
<GitHub css={{ height: '1.5em' }} />
| {
const { pathname } = useRouter();
const check = (candidate: string) => pathname.startsWith(candidate);
if (['/updates', '/releases'].some(check)) return '/updates';
if (['/why-keystone', '/for-'].some(check)) return '/why-keystone';
if (['/docs'].some(check)) return '/docs';
} | identifier_body |
index.js | ave] Resave unmodified sessions back to the store
* @param {Boolean} [options.rolling] Enable/disable rolling session expiration
* @param {Boolean} [options.saveUninitialized] Save uninitialized sessions to the store
* @param {String|Array} [options.secret] Secret for signing session ID
* @param {Object} [options.store=MemoryStore] Session store
* @param {String} [options.unset] Result of unsetting req.session: 'destroy' or 'keep'
* @return {Function} middleware
* @public
*/
function session(options){
options = options || {};
// name - previously "options.key"
var name = options.name || options.key || 'connect.sid'
, store = options.store || new MemoryStore()
, cookie = options.cookie || {}
, trustProxy = options.proxy
, storeReady = true
, rollingSessions = options.rolling || false;
var resaveSession = options.resave;
var saveUninitializedSession = options.saveUninitialized;
var secret = options.secret;
var shouldLock = options.lockFilter || defaultLockFunc;
var generateId = options.genid || generateSessionId;
if (typeof generateId !== 'function') {
throw new TypeError('genid option must be a function');
}
if (resaveSession === undefined) {
deprecate('undefined resave option; provide resave option');
resaveSession = true;
}
if (saveUninitializedSession === undefined) {
deprecate('undefined saveUninitialized option; provide saveUninitialized option');
saveUninitializedSession = true;
}
if (options.unset && options.unset !== 'destroy' && options.unset !== 'keep') {
throw new TypeError('unset option must be "destroy" or "keep"');
}
// TODO: switch to "destroy" on next major
var unsetDestroy = options.unset === 'destroy';
if (Array.isArray(secret) && secret.length === 0) {
throw new TypeError('secret option array must contain one or more strings');
}
if (secret && !Array.isArray(secret)) {
secret = [secret];
}
if (!secret) {
deprecate('req.secret; provide secret option');
}
// notify user that this store is not
// meant for a production environment
if ('production' == env && store instanceof MemoryStore) {
console.warn(warning);
}
// generates the new session
store.generate = function(req){
req.sessionID = generateId(req);
req.session = new Session(req);
req.session.cookie = new Cookie(cookie);
};
var storeImplementsTouch = typeof store.touch === 'function';
store.on('disconnect', function(){ storeReady = false; });
store.on('connect', function(){ storeReady = true; });
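// Sketch (assumption): a store that emits these events, e.g. a backend that
// reconnects, flips storeReady so requests degrade to session-less handling:
// store.emit('disconnect'); // middleware now calls next() without a session
// store.emit('connect');    // normal session handling resumes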
return function session(req, res, next) {
// self-awareness
if (req.session) return next();
// Handle connection as if there is no session if
// the store has temporarily disconnected etc
if (!storeReady) return debug('store is disconnected'), next();
// pathname mismatch
var originalPath = parseUrl.original(req).pathname;
if (0 != originalPath.indexOf(cookie.path || '/')) return next();
// ensure a secret is available or bail
if (!secret && !req.secret) {
next(new Error('secret option required for sessions'));
return;
}
// backwards compatibility for signed cookies
// req.secret is passed from the cookie parser middleware
var secrets = secret || [req.secret];
var originalHash;
var originalId;
var savedHash;
var reqShouldBeLocked = shouldLock(req);
// expose store
req.sessionStore = store;
// get the session ID from the cookie
var cookieId = req.sessionID = getcookie(req, name, secrets);
// set-cookie
onHeaders(res, function(){
if (!req.session) {
debug('no session');
return;
}
var sessionCookie = new Cookie(cookie);
// only send secure cookies via https
if (sessionCookie.secure && !issecure(req, trustProxy)) {
debug('not secured');
return;
}
if (!shouldSetCookie(req)) {
return;
}
setcookie(res, name, req.sessionID, secrets[0], sessionCookie.data);
});
// proxy end() to commit the session
var _end = res.end;
var _write = res.write;
var ended = false;
res.end = function end(chunk, encoding) {
if (ended) {
return false;
}
ended = true;
var ret;
var sync = true;
function writeend() {
if (sync) {
ret = _end.call(res, chunk, encoding);
sync = false;
return;
}
_end.call(res);
}
function writetop() {
if (!sync) {
return ret;
}
if (chunk == null) {
ret = true;
return ret;
}
var contentLength = Number(res.getHeader('Content-Length'));
if (!isNaN(contentLength) && contentLength > 0) {
// measure chunk
chunk = !Buffer.isBuffer(chunk)
? Buffer.from(chunk, encoding)
: chunk;
encoding = undefined;
if (chunk.length !== 0) {
debug('split response');
ret = _write.call(res, chunk.slice(0, chunk.length - 1));
chunk = chunk.slice(chunk.length - 1, chunk.length);
return ret;
}
}
ret = _write.call(res, chunk, encoding);
sync = false;
return ret;
}
if (shouldDestroy(req) && reqShouldBeLocked) {
// destroy session
debug('destroying');
store.destroy(req.sessionID, function ondestroy(err) {
if (err) {
defer(next, err);
}
debug('destroyed');
writeend();
});
return writetop();
}
// no session to save
if (!req.session) {
debug('no session');
return _end.call(res, chunk, encoding);
}
// touch session
req.session.touch();
if (shouldSave(req) && reqShouldBeLocked) {
req.session.save(function onsave(err) {
if (err) {
defer(next, err);
}
writeend();
});
return writetop();
} else if (storeImplementsTouch && shouldTouch(req)) {
// store implements touch method
debug('touching');
store.touch(req.sessionID, req.session, function ontouch(err) {
if (err) {
defer(next, err);
}
debug('touched');
writeend();
});
return writetop();
}
return _end.call(res, chunk, encoding);
};
// generate the session
function generate() {
store.generate(req);
originalId = req.sessionID;
originalHash = hash(req.session);
wrapmethods(req.session);
}
// wrap session methods
function wrapmethods(sess) {
var _save = sess.save;
function save() {
debug('saving %s', this.id);
savedHash = hash(this);
_save.apply(this, arguments);
}
Object.defineProperty(sess, 'save', {
configurable: true,
enumerable: false,
value: save,
writable: true
});
}
// check if session has been modified
function isModified(sess) {
return originalId !== sess.id || originalHash !== hash(sess);
}
// check if session has been saved
function isSaved(sess) {
return originalId === sess.id && savedHash === hash(sess);
}
// determine if session should be destroyed
function shouldDestroy(req) {
return req.sessionID && unsetDestroy && req.session == null;
}
// determine if session should be saved to store
function shouldSave(req) {
// cannot set cookie without a session ID
if (typeof req.sessionID !== 'string') {
debug('session ignored because of bogus req.sessionID %o', req.sessionID);
return false;
}
return !saveUninitializedSession && cookieId !== req.sessionID
? isModified(req.session)
: !isSaved(req.session);
}
// determine if session should be touched
function | (req) {
// cannot set cookie without a session ID
if (typeof req.sessionID !== 'string') {
debug('session ignored because of bogus req.sessionID %o', req.sessionID);
return false;
}
return cookieId === req.sessionID && !shouldSave(req);
}
// determine if cookie should be set on response
function shouldSetCookie(req) {
// cannot set cookie without a session ID
if (typeof req.sessionID !== 'string') {
return false;
}
return cookieId != req.sessionID
? saveUninitializedSession || isModified(req.session)
: rollingSessions || req.session.cookie.expires != null && isModified(req.session);
}
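// Worked examples for the decision above (illustrative):
// - brand new session (cookieId !== req.sessionID): the cookie is set when
//   saveUninitialized is true or the session was modified;
// - existing session: the cookie is re-sent only for rolling sessions, or
//   when a session with an expiring cookie was modified.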
// generate a session if the browser doesn't send a sessionID
if (!req.sessionID) {
debug('no SID sent, generating session');
generate();
next();
return;
}
// generate the session object
debug('fetching %s', req.sessionID);
store.get(req.sessionID, function(err, sess){
// error handling
if | shouldTouch | identifier_name |
index.js | option');
resaveSession = true;
}
if (saveUninitializedSession === undefined) {
deprecate('undefined saveUninitialized option; provide saveUninitialized option');
saveUninitializedSession = true;
}
if (options.unset && options.unset !== 'destroy' && options.unset !== 'keep') {
throw new TypeError('unset option must be "destroy" or "keep"');
}
// TODO: switch to "destroy" on next major
var unsetDestroy = options.unset === 'destroy';
if (Array.isArray(secret) && secret.length === 0) {
throw new TypeError('secret option array must contain one or more strings');
}
if (secret && !Array.isArray(secret)) {
secret = [secret];
}
if (!secret) {
deprecate('req.secret; provide secret option');
}
// notify user that this store is not
// meant for a production environment
if ('production' == env && store instanceof MemoryStore) {
console.warn(warning);
}
// generates the new session
store.generate = function(req){
req.sessionID = generateId(req);
req.session = new Session(req);
req.session.cookie = new Cookie(cookie);
};
var storeImplementsTouch = typeof store.touch === 'function';
store.on('disconnect', function(){ storeReady = false; });
store.on('connect', function(){ storeReady = true; });
return function session(req, res, next) {
// self-awareness
if (req.session) return next();
// Handle connection as if there is no session if
// the store has temporarily disconnected etc
if (!storeReady) return debug('store is disconnected'), next();
// pathname mismatch
var originalPath = parseUrl.original(req).pathname;
if (0 != originalPath.indexOf(cookie.path || '/')) return next();
// ensure a secret is available or bail
if (!secret && !req.secret) {
next(new Error('secret option required for sessions'));
return;
}
// backwards compatibility for signed cookies
// req.secret is passed from the cookie parser middleware
var secrets = secret || [req.secret];
var originalHash;
var originalId;
var savedHash;
var reqShouldBeLocked = shouldLock(req);
// expose store
req.sessionStore = store;
// get the session ID from the cookie
var cookieId = req.sessionID = getcookie(req, name, secrets);
// set-cookie
onHeaders(res, function(){
if (!req.session) {
debug('no session');
return;
}
var sessionCookie = new Cookie(cookie);
// only send secure cookies via https
if (sessionCookie.secure && !issecure(req, trustProxy)) {
debug('not secured');
return;
}
if (!shouldSetCookie(req)) {
return;
}
setcookie(res, name, req.sessionID, secrets[0], sessionCookie.data);
});
// proxy end() to commit the session
var _end = res.end;
var _write = res.write;
var ended = false;
res.end = function end(chunk, encoding) {
if (ended) {
return false;
}
ended = true;
var ret;
var sync = true;
function writeend() {
if (sync) {
ret = _end.call(res, chunk, encoding);
sync = false;
return;
}
_end.call(res);
}
function writetop() {
if (!sync) {
return ret;
}
if (chunk == null) {
ret = true;
return ret;
}
var contentLength = Number(res.getHeader('Content-Length'));
if (!isNaN(contentLength) && contentLength > 0) {
// measure chunk
chunk = !Buffer.isBuffer(chunk)
? Buffer.from(chunk, encoding)
: chunk;
encoding = undefined;
if (chunk.length !== 0) {
debug('split response');
ret = _write.call(res, chunk.slice(0, chunk.length - 1));
chunk = chunk.slice(chunk.length - 1, chunk.length);
return ret;
}
}
ret = _write.call(res, chunk, encoding);
sync = false;
return ret;
}
if (shouldDestroy(req) && reqShouldBeLocked) {
// destroy session
debug('destroying');
store.destroy(req.sessionID, function ondestroy(err) {
if (err) {
defer(next, err);
}
debug('destroyed');
writeend();
});
return writetop();
}
// no session to save
if (!req.session) {
debug('no session');
return _end.call(res, chunk, encoding);
}
// touch session
req.session.touch();
if (shouldSave(req) && reqShouldBeLocked) {
req.session.save(function onsave(err) {
if (err) {
defer(next, err);
}
writeend();
});
return writetop();
} else if (storeImplementsTouch && shouldTouch(req)) {
// store implements touch method
debug('touching');
store.touch(req.sessionID, req.session, function ontouch(err) {
if (err) {
defer(next, err);
}
debug('touched');
writeend();
});
return writetop();
}
return _end.call(res, chunk, encoding);
};
// generate the session
function generate() {
store.generate(req);
originalId = req.sessionID;
originalHash = hash(req.session);
wrapmethods(req.session);
}
// wrap session methods
function wrapmethods(sess) {
var _save = sess.save;
function save() {
debug('saving %s', this.id);
savedHash = hash(this);
_save.apply(this, arguments);
}
Object.defineProperty(sess, 'save', {
configurable: true,
enumerable: false,
value: save,
writable: true
});
}
// check if session has been modified
function isModified(sess) {
return originalId !== sess.id || originalHash !== hash(sess);
}
// check if session has been saved
function isSaved(sess) {
return originalId === sess.id && savedHash === hash(sess);
}
// determine if session should be destroyed
function shouldDestroy(req) {
return req.sessionID && unsetDestroy && req.session == null;
}
// determine if session should be saved to store
function shouldSave(req) {
// cannot set cookie without a session ID
if (typeof req.sessionID !== 'string') {
debug('session ignored because of bogus req.sessionID %o', req.sessionID);
return false;
}
return !saveUninitializedSession && cookieId !== req.sessionID
? isModified(req.session)
: !isSaved(req.session);
}
// determine if session should be touched
function shouldTouch(req) {
// cannot set cookie without a session ID
if (typeof req.sessionID !== 'string') {
debug('session ignored because of bogus req.sessionID %o', req.sessionID);
return false;
}
return cookieId === req.sessionID && !shouldSave(req);
}
// determine if cookie should be set on response
function shouldSetCookie(req) {
// cannot set cookie without a session ID
if (typeof req.sessionID !== 'string') {
return false;
}
return cookieId != req.sessionID
? saveUninitializedSession || isModified(req.session)
: rollingSessions || req.session.cookie.expires != null && isModified(req.session);
}
// generate a session if the browser doesn't send a sessionID
if (!req.sessionID) {
debug('no SID sent, generating session');
generate();
next();
return;
}
// generate the session object
debug('fetching %s', req.sessionID);
store.get(req.sessionID, function(err, sess){
// error handling
if (err) {
debug('error %j', err);
if (err.code !== 'ENOENT') {
next(err);
return;
}
generate();
// no session
} else if (!sess) {
debug('no session found');
generate();
// populate req.session
} else {
debug('session found');
store.createSession(req, sess);
originalId = req.sessionID;
originalHash = hash(sess);
if (!resaveSession) {
savedHash = originalHash;
}
wrapmethods(req.session);
}
next();
});
};
};
/**
* Generate a session ID for a new session.
*
* @return {String}
* @private
*/
function generateSessionId(sess) {
return uid(24);
}
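// Hypothetical usage sketch (not part of this module): overriding the
// default 24-byte id with a custom generator via the genid option.
// app.use(session({
//   secret: 'keyboard cat',
//   resave: false,
//   saveUninitialized: false,
//   genid: function (req) {
//     return uid(32); // longer ids, same uid-safe alphabet
//   }
// }));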
function defaultLockFunc(req) {
return true;
}
/**
* Get the session ID cookie from request.
*
* @return {string}
* @private
*/
function getcookie(req, name, secrets) {
var header = req.headers.cookie;
var raw;
var val;
// read from cookie header
if (header) {
var cookies = cookie.parse(header);
raw = cookies[name];
if (raw) {
if (raw.substr(0, 2) === 's:') {
val = unsigncookie(raw.slice(2), secrets); | random_line_split |
||
index.js | ) {
console.warn(warning);
}
// generates the new session
store.generate = function(req){
req.sessionID = generateId(req);
req.session = new Session(req);
req.session.cookie = new Cookie(cookie);
};
var storeImplementsTouch = typeof store.touch === 'function';
store.on('disconnect', function(){ storeReady = false; });
store.on('connect', function(){ storeReady = true; });
return function session(req, res, next) {
// self-awareness
if (req.session) return next();
// Handle connection as if there is no session if
// the store has temporarily disconnected etc
if (!storeReady) return debug('store is disconnected'), next();
// pathname mismatch
var originalPath = parseUrl.original(req).pathname;
if (0 != originalPath.indexOf(cookie.path || '/')) return next();
// ensure a secret is available or bail
if (!secret && !req.secret) {
next(new Error('secret option required for sessions'));
return;
}
// backwards compatibility for signed cookies
// req.secret is passed from the cookie parser middleware
var secrets = secret || [req.secret];
var originalHash;
var originalId;
var savedHash;
var reqShouldBeLocked = shouldLock(req);
// expose store
req.sessionStore = store;
// get the session ID from the cookie
var cookieId = req.sessionID = getcookie(req, name, secrets);
// set-cookie
onHeaders(res, function(){
if (!req.session) {
debug('no session');
return;
}
var sessionCookie = new Cookie(cookie);
// only send secure cookies via https
if (sessionCookie.secure && !issecure(req, trustProxy)) {
debug('not secured');
return;
}
if (!shouldSetCookie(req)) {
return;
}
setcookie(res, name, req.sessionID, secrets[0], sessionCookie.data);
});
// proxy end() to commit the session
var _end = res.end;
var _write = res.write;
var ended = false;
res.end = function end(chunk, encoding) {
if (ended) {
return false;
}
ended = true;
var ret;
var sync = true;
function writeend() {
if (sync) {
ret = _end.call(res, chunk, encoding);
sync = false;
return;
}
_end.call(res);
}
function writetop() {
if (!sync) {
return ret;
}
if (chunk == null) {
ret = true;
return ret;
}
var contentLength = Number(res.getHeader('Content-Length'));
if (!isNaN(contentLength) && contentLength > 0) {
// measure chunk
chunk = !Buffer.isBuffer(chunk)
? Buffer.from(chunk, encoding)
: chunk;
encoding = undefined;
if (chunk.length !== 0) {
debug('split response');
ret = _write.call(res, chunk.slice(0, chunk.length - 1));
chunk = chunk.slice(chunk.length - 1, chunk.length);
return ret;
}
}
ret = _write.call(res, chunk, encoding);
sync = false;
return ret;
}
if (shouldDestroy(req) && reqShouldBeLocked) {
// destroy session
debug('destroying');
store.destroy(req.sessionID, function ondestroy(err) {
if (err) {
defer(next, err);
}
debug('destroyed');
writeend();
});
return writetop();
}
// no session to save
if (!req.session) {
debug('no session');
return _end.call(res, chunk, encoding);
}
// touch session
req.session.touch();
if (shouldSave(req) && reqShouldBeLocked) {
req.session.save(function onsave(err) {
if (err) {
defer(next, err);
}
writeend();
});
return writetop();
} else if (storeImplementsTouch && shouldTouch(req)) {
// store implements touch method
debug('touching');
store.touch(req.sessionID, req.session, function ontouch(err) {
if (err) {
defer(next, err);
}
debug('touched');
writeend();
});
return writetop();
}
return _end.call(res, chunk, encoding);
};
// generate the session
function generate() {
store.generate(req);
originalId = req.sessionID;
originalHash = hash(req.session);
wrapmethods(req.session);
}
// wrap session methods
function wrapmethods(sess) {
var _save = sess.save;
function save() {
debug('saving %s', this.id);
savedHash = hash(this);
_save.apply(this, arguments);
}
Object.defineProperty(sess, 'save', {
configurable: true,
enumerable: false,
value: save,
writable: true
});
}
// check if session has been modified
function isModified(sess) {
return originalId !== sess.id || originalHash !== hash(sess);
}
// check if session has been saved
function isSaved(sess) {
return originalId === sess.id && savedHash === hash(sess);
}
// determine if session should be destroyed
function shouldDestroy(req) {
return req.sessionID && unsetDestroy && req.session == null;
}
// determine if session should be saved to store
function shouldSave(req) {
// cannot set cookie without a session ID
if (typeof req.sessionID !== 'string') {
debug('session ignored because of bogus req.sessionID %o', req.sessionID);
return false;
}
return !saveUninitializedSession && cookieId !== req.sessionID
? isModified(req.session)
: !isSaved(req.session);
}
// determine if session should be touched
function shouldTouch(req) {
// cannot set cookie without a session ID
if (typeof req.sessionID !== 'string') {
debug('session ignored because of bogus req.sessionID %o', req.sessionID);
return false;
}
return cookieId === req.sessionID && !shouldSave(req);
}
// determine if cookie should be set on response
function shouldSetCookie(req) {
// cannot set cookie without a session ID
if (typeof req.sessionID !== 'string') {
return false;
}
return cookieId != req.sessionID
? saveUninitializedSession || isModified(req.session)
: rollingSessions || req.session.cookie.expires != null && isModified(req.session);
}
// generate a session if the browser doesn't send a sessionID
if (!req.sessionID) {
debug('no SID sent, generating session');
generate();
next();
return;
}
// generate the session object
debug('fetching %s', req.sessionID);
store.get(req.sessionID, function(err, sess){
// error handling
if (err) {
debug('error %j', err);
if (err.code !== 'ENOENT') {
next(err);
return;
}
generate();
// no session
} else if (!sess) {
debug('no session found');
generate();
// populate req.session
} else {
debug('session found');
store.createSession(req, sess);
originalId = req.sessionID;
originalHash = hash(sess);
if (!resaveSession) {
savedHash = originalHash;
}
wrapmethods(req.session);
}
next();
});
};
};
/**
* Generate a session ID for a new session.
*
* @return {String}
* @private
*/
function generateSessionId(sess) {
return uid(24);
}
function defaultLockFunc(req) {
return true;
}
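// Sketch (assumption, based on the lockFilter option read above): skip
// session persistence for requests that should never create or touch
// sessions, such as health checks.
// app.use(session({
//   secret: 'keyboard cat',
//   lockFilter: function (req) {
//     return req.url.indexOf('/healthz') !== 0;
//   }
// }));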
/**
* Get the session ID cookie from request.
*
* @return {string}
* @private
*/
function getcookie(req, name, secrets) {
var header = req.headers.cookie;
var raw;
var val;
// read from cookie header
if (header) {
var cookies = cookie.parse(header);
raw = cookies[name];
if (raw) {
if (raw.substr(0, 2) === 's:') {
val = unsigncookie(raw.slice(2), secrets);
if (val === false) {
debug('cookie signature invalid');
val = undefined;
}
} else {
debug('cookie unsigned');
}
}
}
// back-compat read from cookieParser() signedCookies data
if (!val && req.signedCookies) {
val = req.signedCookies[name];
if (val) {
deprecate('cookie should be available in req.headers.cookie');
}
}
// back-compat read from cookieParser() cookies data
if (!val && req.cookies) {
raw = req.cookies[name];
if (raw) {
if (raw.substr(0, 2) === 's:') {
val = unsigncookie(raw.slice(2), secrets);
if (val) {
deprecate('cookie should be available in req.headers.cookie');
}
if (val === false) {
debug('cookie signature invalid');
val = undefined;
}
} else | {
debug('cookie unsigned');
} | conditional_block |
|
index.js | ave] Resave unmodified sessions back to the store
* @param {Boolean} [options.rolling] Enable/disable rolling session expiration
* @param {Boolean} [options.saveUninitialized] Save uninitialized sessions to the store
* @param {String|Array} [options.secret] Secret for signing session ID
* @param {Object} [options.store=MemoryStore] Session store
* @param {String} [options.unset] Result of unsetting req.session: 'destroy' or 'keep'
* @return {Function} middleware
* @public
*/
function session(options){
options = options || {};
// name - previously "options.key"
var name = options.name || options.key || 'connect.sid'
, store = options.store || new MemoryStore()
, cookie = options.cookie || {}
, trustProxy = options.proxy
, storeReady = true
, rollingSessions = options.rolling || false;
var resaveSession = options.resave;
var saveUninitializedSession = options.saveUninitialized;
var secret = options.secret;
var shouldLock = options.lockFilter || defaultLockFunc;
var generateId = options.genid || generateSessionId;
if (typeof generateId !== 'function') {
throw new TypeError('genid option must be a function');
}
if (resaveSession === undefined) {
deprecate('undefined resave option; provide resave option');
resaveSession = true;
}
if (saveUninitializedSession === undefined) {
deprecate('undefined saveUninitialized option; provide saveUninitialized option');
saveUninitializedSession = true;
}
if (options.unset && options.unset !== 'destroy' && options.unset !== 'keep') {
throw new TypeError('unset option must be "destroy" or "keep"');
}
// TODO: switch to "destroy" on next major
var unsetDestroy = options.unset === 'destroy';
if (Array.isArray(secret) && secret.length === 0) {
throw new TypeError('secret option array must contain one or more strings');
}
if (secret && !Array.isArray(secret)) {
secret = [secret];
}
if (!secret) {
deprecate('req.secret; provide secret option');
}
// notify user that this store is not
// meant for a production environment
if ('production' == env && store instanceof MemoryStore) {
console.warn(warning);
}
// generates the new session
store.generate = function(req){
req.sessionID = generateId(req);
req.session = new Session(req);
req.session.cookie = new Cookie(cookie);
};
var storeImplementsTouch = typeof store.touch === 'function';
store.on('disconnect', function(){ storeReady = false; });
store.on('connect', function(){ storeReady = true; });
return function session(req, res, next) {
// self-awareness
if (req.session) return next();
// Handle connection as if there is no session if
// the store has temporarily disconnected etc
if (!storeReady) return debug('store is disconnected'), next();
// pathname mismatch
var originalPath = parseUrl.original(req).pathname;
if (0 != originalPath.indexOf(cookie.path || '/')) return next();
// ensure a secret is available or bail
if (!secret && !req.secret) {
next(new Error('secret option required for sessions'));
return;
}
// backwards compatibility for signed cookies
// req.secret is passed from the cookie parser middleware
var secrets = secret || [req.secret];
var originalHash;
var originalId;
var savedHash;
var reqShouldBeLocked = shouldLock(req);
// expose store
req.sessionStore = store;
// get the session ID from the cookie
var cookieId = req.sessionID = getcookie(req, name, secrets);
// set-cookie
onHeaders(res, function(){
if (!req.session) {
debug('no session');
return;
}
var sessionCookie = new Cookie(cookie);
// only send secure cookies via https
if (sessionCookie.secure && !issecure(req, trustProxy)) {
debug('not secured');
return;
}
if (!shouldSetCookie(req)) {
return;
}
setcookie(res, name, req.sessionID, secrets[0], sessionCookie.data);
});
// proxy end() to commit the session
var _end = res.end;
var _write = res.write;
var ended = false;
res.end = function end(chunk, encoding) {
if (ended) {
return false;
}
ended = true;
var ret;
var sync = true;
function writeend() {
if (sync) {
ret = _end.call(res, chunk, encoding);
sync = false;
return;
}
_end.call(res);
}
function writetop() {
if (!sync) {
return ret;
}
if (chunk == null) {
ret = true;
return ret;
}
var contentLength = Number(res.getHeader('Content-Length'));
if (!isNaN(contentLength) && contentLength > 0) {
// measure chunk
chunk = !Buffer.isBuffer(chunk)
? Buffer.from(chunk, encoding)
: chunk;
encoding = undefined;
if (chunk.length !== 0) {
debug('split response');
ret = _write.call(res, chunk.slice(0, chunk.length - 1));
chunk = chunk.slice(chunk.length - 1, chunk.length);
return ret;
}
}
ret = _write.call(res, chunk, encoding);
sync = false;
return ret;
}
if (shouldDestroy(req) && reqShouldBeLocked) {
// destroy session
debug('destroying');
store.destroy(req.sessionID, function ondestroy(err) {
if (err) {
defer(next, err);
}
debug('destroyed');
writeend();
});
return writetop();
}
// no session to save
if (!req.session) {
debug('no session');
return _end.call(res, chunk, encoding);
}
// touch session
req.session.touch();
if (shouldSave(req) && reqShouldBeLocked) {
req.session.save(function onsave(err) {
if (err) {
defer(next, err);
}
writeend();
});
return writetop();
} else if (storeImplementsTouch && shouldTouch(req)) {
// store implements touch method
debug('touching');
store.touch(req.sessionID, req.session, function ontouch(err) {
if (err) {
defer(next, err);
}
debug('touched');
writeend();
});
return writetop();
}
return _end.call(res, chunk, encoding);
};
// generate the session
function generate() {
store.generate(req);
originalId = req.sessionID;
originalHash = hash(req.session);
wrapmethods(req.session);
}
// wrap session methods
function wrapmethods(sess) {
var _save = sess.save;
function save() {
debug('saving %s', this.id);
savedHash = hash(this);
_save.apply(this, arguments);
}
Object.defineProperty(sess, 'save', {
configurable: true,
enumerable: false,
value: save,
writable: true
});
}
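// Illustrative note (assumption): isModified/isSaved below compare a content
// hash taken at load time against the current session state, so a plain
// mutation like
//   req.session.views = (req.session.views || 0) + 1;
// flips isModified() to true without any explicit save() call.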
// check if session has been modified
function isModified(sess) {
return originalId !== sess.id || originalHash !== hash(sess);
}
// check if session has been saved
function isSaved(sess) {
return originalId === sess.id && savedHash === hash(sess);
}
// determine if session should be destroyed
function shouldDestroy(req) {
return req.sessionID && unsetDestroy && req.session == null;
}
// determine if session should be saved to store
function shouldSave(req) {
// cannot set cookie without a session ID
if (typeof req.sessionID !== 'string') {
debug('session ignored because of bogus req.sessionID %o', req.sessionID);
return false;
}
return !saveUninitializedSession && cookieId !== req.sessionID
? isModified(req.session)
: !isSaved(req.session);
}
// determine if session should be touched
function shouldTouch(req) |
// determine if cookie should be set on response
function shouldSetCookie(req) {
// cannot set cookie without a session ID
if (typeof req.sessionID !== 'string') {
return false;
}
return cookieId != req.sessionID
? saveUninitializedSession || isModified(req.session)
: rollingSessions || req.session.cookie.expires != null && isModified(req.session);
}
// generate a session if the browser doesn't send a sessionID
if (!req.sessionID) {
debug('no SID sent, generating session');
generate();
next();
return;
}
// generate the session object
debug('fetching %s', req.sessionID);
store.get(req.sessionID, function(err, sess){
// error handling
| {
// cannot set cookie without a session ID
if (typeof req.sessionID !== 'string') {
debug('session ignored because of bogus req.sessionID %o', req.sessionID);
return false;
}
return cookieId === req.sessionID && !shouldSave(req);
} | identifier_body |
store.py | AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import logging
import six
from collections import namedtuple
from time import time
from random import random
from django.utils import timezone
from django.utils.functional import cached_property
from sentry.db.models.query import create_or_update
from sentry.utils.hashlib import md5_text
Key = namedtuple('Key', ('name', 'default', 'type', 'flags', 'ttl', 'grace', 'cache_key'))
CACHE_FETCH_ERR = 'Unable to fetch option cache for %s'
CACHE_UPDATE_ERR = 'Unable to update option cache for %s'
logger = logging.getLogger('sentry')
def _make_cache_key(key):
return 'o:%s' % md5_text(key).hexdigest()
def _make_cache_value(key, value):
now = int(time())
return (
value,
now + key.ttl,
now + key.ttl + key.grace,
)
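# Worked example (illustrative, not in the original module): with key.ttl=30
# and key.grace=300, a value cached at t=1000 yields
#   _make_cache_value(key, value) == (value, 1030, 1330)
# i.e. served normally until t=1030, and only under force_grace until t=1330.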
class OptionsStore(object):
"""
Abstraction for the Option storage logic that should be driven
by the OptionsManager.
OptionsStore is gooey and raw. It provides no protection over
what goes into the store. It only knows that it's reading/writing
to the right place. If using the OptionsStore directly, it's your
job to do validation of the data. You should probably go through
OptionsManager instead, unless you need raw access to something.
"""
def __init__(self, cache=None, ttl=None):
self.cache = cache
self.ttl = ttl
self.flush_local_cache()
@cached_property
def model(self):
from sentry.models.option import Option
return Option
def make_key(self, name, default, type, flags, ttl, grace):
return Key(name, default, type, flags, int(ttl), int(grace), _make_cache_key(name))
def get(self, key, silent=False):
"""
Fetches a value from the options store.
"""
result = self.get_cache(key, silent=silent)
if result is not None:
return result
result = self.get_store(key, silent=silent)
if result is not None:
return result
# As a last ditch effort, let's hope we have a key
# in local cache that's possibly stale
return self.get_local_cache(key, force_grace=True)
def get_cache(self, key, silent=False):
"""
First check against our local in-process cache, falling
back to the network cache.
"""
value = self.get_local_cache(key)
if value is not None:
return value
if self.cache is None:
return None
cache_key = key.cache_key
try:
value = self.cache.get(cache_key)
except Exception:
if not silent:
logger.warn(CACHE_FETCH_ERR, key.name, exc_info=True)
value = None
else:
if key.ttl > 0:
self._local_cache[cache_key] = _make_cache_value(key, value)
return value
def get_local_cache(self, key, force_grace=False):
"""
Attempt to fetch a key out of the local cache.
If the key exists, but is beyond expiration, we only
return it if force_grace=True. This forces the key to be returned
in a disaster scenario as long as we're still holding onto it.
This allows the OptionStore to pave over potential network hiccups
by returning a stale value.
"""
try:
value, expires, grace = self._local_cache[key.cache_key]
except KeyError:
return None
now = int(time())
# Key is within normal expiry window, so just return it
if now < expires:
return value
# If we're able to accept within grace window, return it
if force_grace and now < grace:
return value
# Let's clean up values if we're beyond grace.
if now > grace:
try:
del self._local_cache[key.cache_key]
except KeyError:
# This could only exist in a race condition
# where another thread has already deleted this key,
# but we'll guard ourselves against it Justin Case.
# In this case, it's also possible that another thread
# has updated the value at this key, causing us to evict
# it prematurely. This isn't ideal, but not terrible
# since I don't want to introduce locking to prevent this.
# Even if it did happen, the consequence is just another
# network hop.
pass
# If we're outside the grace window, even if we ask for it
# in grace, too bad. The value is considered bad.
return None
def get_store(self, key, silent=False):
"""
Attempt to fetch value from the database. If successful,
also set it back in the cache.
Returns None in both cases: if the key doesn't actually exist,
or if we errored fetching it.
NOTE: This behavior should probably be improved to differentiate
between a miss vs error, but not worth it now since the value
is limited at the moment.
"""
try:
value = self.model.objects.get(key=key.name).value
except self.model.DoesNotExist:
value = None
except Exception as e:
if not silent:
logger.exception(six.text_type(e))
value = None
else:
# we only attempt to populate the cache if we were previously
# able to successfully talk to the backend
# NOTE: There is definitely a race condition here between updating
# the store and the cache
try:
self.set_cache(key, value)
except Exception:
if not silent:
logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True)
return value
def set(self, key, value):
"""
Store a value in the option store. The value must be persisted to the
database first, then written to the caches. If the database write fails,
the entire operation blows up.
If cache fails, we ignore silently since it'll get repaired later by sync_options.
A boolean is returned to indicate if the network cache was set successfully.
"""
assert self.cache is not None, 'cache must be configured before mutating options'
self.set_store(key, value)
return self.set_cache(key, value)
def set_store(self, key, value):
|
def set_cache(self, key, value):
if self.cache is None:
return None
cache_key = key.cache_key
if key.ttl > 0:
self._local_cache[cache_key] = _make_cache_value(key, value)
try:
self.cache.set(cache_key, value, self.ttl)
return True
except Exception:
logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True)
return False
def delete(self, key):
"""
Remove key out of option stores. This operation must succeed on the
database first. If database fails, an exception is raised.
If database succeeds, caches are then allowed to fail silently.
A boolean is returned to indicate if the network deletion succeeds.
"""
assert self.cache is not None, 'cache must be configured before mutating options'
self.delete_store(key)
return self.delete_cache(key)
def delete_store(self, key):
self.model.objects.filter(key=key.name).delete()
def delete_cache(self, key):
cache_key = key.cache_key
try:
del self._local_cache[cache_key]
except KeyError:
pass
try:
self.cache.delete(cache_key)
return True
except Exception:
logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True)
return False
def clean_local_cache(self):
"""
Iterate over our local cache items, and
remove the keys that are beyond their grace time.
"""
to_expire = []
now = int(time())
try:
for k, (_, _, grace) in six.iteritems(self._local_cache):
if now > grace:
to_expire.append(k)
except RuntimeError:
# It's possible for the dictionary to be mutated in another thread
# while iterating, but this case is rare, so instead of making a
# copy and iterating that, it's more efficient to just let it fail
# gracefully. It'll just get re-run later.
return
for k in to_expire:
try:
del self._local_cache[k]
except KeyError:
# This could only exist in a race condition
# where another thread has already deleted this key,
# but we'll guard ourselves against it Justin Case.
pass
def flush_local_cache(self):
"""
Empty store's local in-process cache.
"""
self._local_cache = {}
def maybe_clean_local_cache(self, **kwargs):
# Periodically force an expire on the local cache.
# This cleanup is purely to keep memory low and garbage collect
# old values. It's not required to run to keep things consistent.
# Internally, if an option is fetched and it's expired, it gets
# evicted immediately. This is purely for options that haven't
# been fetched since they've expired.
if not | create_or_update(
model=self.model,
key=key.name,
values={
'value': value,
'last_updated': timezone.now(),
}
) | identifier_body |
store.py | AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import logging
import six
from collections import namedtuple
from time import time
from random import random
from django.utils import timezone
from django.utils.functional import cached_property
from sentry.db.models.query import create_or_update
from sentry.utils.hashlib import md5_text
Key = namedtuple('Key', ('name', 'default', 'type', 'flags', 'ttl', 'grace', 'cache_key'))
CACHE_FETCH_ERR = 'Unable to fetch option cache for %s'
CACHE_UPDATE_ERR = 'Unable to update option cache for %s'
logger = logging.getLogger('sentry')
def _make_cache_key(key):
return 'o:%s' % md5_text(key).hexdigest()
def _make_cache_value(key, value):
now = int(time())
return (
value,
now + key.ttl,
now + key.ttl + key.grace,
)
class OptionsStore(object):
"""
Abstraction for the Option storage logic that should be driven
by the OptionsManager.
OptionsStore is gooey and raw. It provides no protection over
what goes into the store. It only knows that it's reading/writing
to the right place. If using the OptionsStore directly, it's your
job to do validation of the data. You should probably go through
OptionsManager instead, unless you need raw access to something.
"""
def __init__(self, cache=None, ttl=None):
self.cache = cache
self.ttl = ttl
self.flush_local_cache()
@cached_property
def model(self):
from sentry.models.option import Option
return Option
def make_key(self, name, default, type, flags, ttl, grace):
return Key(name, default, type, flags, int(ttl), int(grace), _make_cache_key(name))
def get(self, key, silent=False):
"""
Fetches a value from the options store.
"""
result = self.get_cache(key, silent=silent)
if result is not None:
return result
result = self.get_store(key, silent=silent)
if result is not None:
return result
# As a last ditch effort, let's hope we have a key
# in local cache that's possibly stale
return self.get_local_cache(key, force_grace=True)
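# Lookup order above: local in-process cache -> network cache -> database ->
# stale local value within the grace window. A hedged caller sketch (names
# are assumptions):
#   key = store.make_key('system.url', '', str, 0, ttl=30, grace=300)
#   value = store.get(key, silent=True)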
def get_cache(self, key, silent=False):
"""
First check against our local in-process cache, falling
back to the network cache.
"""
value = self.get_local_cache(key)
if value is not None:
return value
if self.cache is None:
return None
cache_key = key.cache_key
try:
value = self.cache.get(cache_key)
except Exception:
if not silent:
logger.warn(CACHE_FETCH_ERR, key.name, exc_info=True)
value = None
else:
if key.ttl > 0:
self._local_cache[cache_key] = _make_cache_value(key, value)
return value
def get_local_cache(self, key, force_grace=False):
"""
Attempt to fetch a key out of the local cache.
If the key exists, but is beyond expiration, we only
return it if force_grace=True. This forces the key to be returned
in a disaster scenario as long as we're still holding onto it.
This allows the OptionStore to pave over potential network hiccups
by returning a stale value.
"""
try:
value, expires, grace = self._local_cache[key.cache_key]
except KeyError:
return None
now = int(time())
# Key is within normal expiry window, so just return it
if now < expires:
return value
# If we're able to accept within grace window, return it
if force_grace and now < grace:
return value
# Let's clean up values if we're beyond grace.
if now > grace:
try:
del self._local_cache[key.cache_key]
except KeyError:
# This could only exist in a race condition
# where another thread has already deleted this key,
# but we'll guard ourselves against it Justin Case.
# In this case, it's also possible that another thread
# has updated the value at this key, causing us to evict
# it prematurely. This isn't ideal, but not terrible
# since I don't want to introduce locking to prevent this.
# Even if it did happen, the consequence is just another
# network hop.
pass
# If we're outside the grace window, even if we ask for it
# in grace, too bad. The value is considered bad.
return None
def get_store(self, key, silent=False):
"""
Attempt to fetch value from the database. If successful,
also set it back in the cache.
Returns None in both cases: if the key doesn't actually exist,
or if we errored fetching it.
NOTE: This behavior should probably be improved to differentiate
between a miss vs error, but not worth it now since the value
is limited at the moment.
"""
try:
value = self.model.objects.get(key=key.name).value
except self.model.DoesNotExist:
value = None
except Exception as e:
if not silent:
logger.exception(six.text_type(e))
value = None
else:
# we only attempt to populate the cache if we were previously
# able to successfully talk to the backend
# NOTE: There is definitely a race condition here between updating
# the store and the cache
try:
self.set_cache(key, value)
except Exception:
if not silent:
logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True)
return value
def set(self, key, value):
"""
Store a value in the option store. Value must get persisted to database first,
then attempt caches. If it fails database, the entire operation blows up.
If cache fails, we ignore silently since it'll get repaired later by sync_options.
A boolean is returned to indicate if the network cache was set successfully.
"""
assert self.cache is not None, 'cache must be configured before mutating options'
self.set_store(key, value)
return self.set_cache(key, value)
def set_store(self, key, value):
create_or_update(
model=self.model,
key=key.name,
values={
'value': value,
'last_updated': timezone.now(),
}
)
def set_cache(self, key, value):
if self.cache is None:
return None
cache_key = key.cache_key
if key.ttl > 0:
self._local_cache[cache_key] = _make_cache_value(key, value)
try:
self.cache.set(cache_key, value, self.ttl)
return True
except Exception:
logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True)
return False
def delete(self, key):
"""
Remove key out of option stores. This operation must succeed on the
database first. If database fails, an exception is raised.
If database succeeds, caches are then allowed to fail silently.
A boolean is returned to indicate if the network deletion succeeds.
"""
assert self.cache is not None, 'cache must be configured before mutating options'
self.delete_store(key)
return self.delete_cache(key)
def delete_store(self, key):
self.model.objects.filter(key=key.name).delete()
def delete_cache(self, key):
cache_key = key.cache_key
try:
del self._local_cache[cache_key]
except KeyError:
pass
try:
self.cache.delete(cache_key)
return True
except Exception:
logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True)
return False
def clean_local_cache(self):
"""
Iterate over our local cache items, and
remove the keys that are beyond their grace time.
"""
to_expire = []
now = int(time())
try:
for k, (_, _, grace) in six.iteritems(self._local_cache):
if now > grace:
to_expire.append(k)
except RuntimeError:
# It's possible for the dictionary to be mutated in another thread
# while iterating, but this case is rare, so instead of making a
# copy and iterating that, it's more efficient to just let it fail
# gracefully. It'll just get re-run later.
return
for k in to_expire:
try:
del self._local_cache[k]
except KeyError:
# This could only exist in a race condition
# where another thread has already deleted this key,
# but we'll guard ourselves against it Justin Case.
pass
def flush_local_cache(self):
"""
Empty store's local in-process cache.
"""
self._local_cache = {}
def maybe_clean_local_cache(self, **kwargs):
# Periodically force an expire on the local cache.
# This cleanup is purely to keep memory low and garbage collect
# old values. It's not required to run to keep things consistent.
# Internally, if an option is fetched and it's expired, it gets
# evicted immediately. This is purely for options that haven't
# been fetched since they've expired.
if not | delete | identifier_name |
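Read as data, each record above is one fill-in-the-middle example: the pipe marks where a span was cut out of the file, and the trailing cells carry the removed span (here "delete") and its fim_type (here "identifier_name"). A minimal Python sketch of reassembling and checking such a record; the dict field names are assumptions based on this dump, not a documented schema:

def reassemble(record):
    # Splice the removed span back between the surrounding context.
    return record['prefix'] + record['middle'] + record['suffix']

example = {
    'prefix': 'def ',
    'middle': 'delete',                 # the span a model must predict
    'suffix': '(self, key):\n    ...',
    'fim_type': 'identifier_name',      # kind of span that was removed
}
assert reassemble(example).startswith('def delete(self, key):')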
store.py | AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import logging
import six
from collections import namedtuple
from time import time
from random import random
from django.utils import timezone
from django.utils.functional import cached_property
from sentry.db.models.query import create_or_update
from sentry.utils.hashlib import md5_text
Key = namedtuple('Key', ('name', 'default', 'type', 'flags', 'ttl', 'grace', 'cache_key'))
CACHE_FETCH_ERR = 'Unable to fetch option cache for %s'
CACHE_UPDATE_ERR = 'Unable to update option cache for %s'
logger = logging.getLogger('sentry')
def _make_cache_key(key):
return 'o:%s' % md5_text(key).hexdigest()
def _make_cache_value(key, value):
now = int(time())
return (
value,
now + key.ttl,
now + key.ttl + key.grace,
)
class OptionsStore(object):
"""
Abstraction for the Option storage logic that should be driven
by the OptionsManager.
OptionsStore is gooey and raw. It provides no protection over
what goes into the store. It only knows that it's reading/writing
to the right place. If using the OptionsStore directly, it's your
job to do validation of the data. You should probably go through
OptionsManager instead, unless you need raw access to something.
"""
def __init__(self, cache=None, ttl=None):
self.cache = cache
self.ttl = ttl
self.flush_local_cache()
@cached_property
def model(self):
from sentry.models.option import Option
return Option
def make_key(self, name, default, type, flags, ttl, grace):
return Key(name, default, type, flags, int(ttl), int(grace), _make_cache_key(name))
def get(self, key, silent=False):
"""
Fetches a value from the options store.
"""
result = self.get_cache(key, silent=silent)
if result is not None:
return result
result = self.get_store(key, silent=silent)
if result is not None:
return result
# As a last ditch effort, let's hope we have a key
# in local cache that's possibly stale
return self.get_local_cache(key, force_grace=True)
def get_cache(self, key, silent=False):
"""
First check against our local in-process cache, falling
back to the network cache.
"""
value = self.get_local_cache(key)
if value is not None:
return value
if self.cache is None:
return None
cache_key = key.cache_key
try:
value = self.cache.get(cache_key)
except Exception:
if not silent:
logger.warn(CACHE_FETCH_ERR, key.name, exc_info=True)
value = None
else:
if key.ttl > 0:
self._local_cache[cache_key] = _make_cache_value(key, value)
return value
def get_local_cache(self, key, force_grace=False):
"""
Attempt to fetch a key out of the local cache.
If the key exists, but is beyond expiration, we only
return it if grace=True. This forces the key to be returned
in a disaster scenario as long as we're still holding onto it.
This allows the OptionStore to pave over potential network hiccups
by returning a stale value.
"""
try:
value, expires, grace = self._local_cache[key.cache_key]
except KeyError:
return None
now = int(time())
# Key is within normal expiry window, so just return it
if now < expires:
return value
# If we're able to accept within grace window, return it
if force_grace and now < grace:
return value
# Let's clean up values if we're beyond grace.
if now > grace:
try:
del self._local_cache[key.cache_key]
except KeyError:
# This could only exist in a race condition
# where another thread has already deleted this key,
# but we'll guard ourselves against it Justin Case.
# In this case, it's also possible that another thread
# has updated the value at this key, causing us to evict
# it prematurely. This isn't ideal, but not terrible
# since I don't want to introduce locking to prevent this.
# Even if it did happen, the consequence is just another
# network hop.
pass
# If we're outside the grace window, even if we ask for it
# in grace, too bad. The value is considered bad.
return None
def get_store(self, key, silent=False):
"""
Attempt to fetch value from the database. If successful,
also set it back in the cache.
Returns None in both cases, if the key doesn't actually exist,
or if we errored fetching it.
NOTE: This behavior should probably be improved to differentiate
between a miss vs error, but not worth it now since the value
is limited at the moment.
"""
try:
value = self.model.objects.get(key=key.name).value
except self.model.DoesNotExist:
value = None
except Exception as e:
if not silent:
logger.exception(six.text_type(e))
value = None
else:
# we only attempt to populate the cache if we were previously
# able to successfully talk to the backend
# NOTE: There is definitely a race condition here between updating
# the store and the cache
try:
self.set_cache(key, value)
except Exception:
if not silent:
logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True)
return value
def set(self, key, value):
"""
Store a value in the option store. Value must get persisted to database first,
then attempt caches. If it fails database, the entire operation blows up.
If cache fails, we ignore silently since it'll get repaired later by sync_options.
A boolean is returned to indicate if the network cache was set successfully.
"""
assert self.cache is not None, 'cache must be configured before mutating options'
self.set_store(key, value)
return self.set_cache(key, value)
def set_store(self, key, value):
create_or_update(
model=self.model,
key=key.name,
values={
'value': value,
'last_updated': timezone.now(),
}
)
def set_cache(self, key, value):
if self.cache is None:
return None
cache_key = key.cache_key
if key.ttl > 0:
self._local_cache[cache_key] = _make_cache_value(key, value)
try:
self.cache.set(cache_key, value, self.ttl)
return True
except Exception:
logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True)
return False
def delete(self, key):
"""
Remove key out of option stores. This operation must succeed on the
database first. If database fails, an exception is raised.
If database succeeds, caches are then allowed to fail silently.
A boolean is returned to indicate if the network deletion succeeds.
"""
assert self.cache is not None, 'cache must be configured before mutating options'
self.delete_store(key)
return self.delete_cache(key)
def delete_store(self, key):
self.model.objects.filter(key=key.name).delete()
def delete_cache(self, key):
cache_key = key.cache_key
try:
del self._local_cache[cache_key]
except KeyError:
pass
try:
self.cache.delete(cache_key)
return True
except Exception:
logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True)
return False
def clean_local_cache(self):
"""
Iterate over our local cache items, and
remove the keys that are beyond their grace time.
"""
to_expire = []
now = int(time())
try:
for k, (_, _, grace) in six.iteritems(self._local_cache):
if now > grace:
to_expire.append(k)
except RuntimeError:
# It's possible for the dictionary to be mutated in another thread
# while iterating, but this case is rare, so instead of making a
# copy and iterating that, it's more efficient to just let it fail
# gracefully. It'll just get re-run later.
return
for k in to_expire:
try:
del self._local_cache[k]
except KeyError:
# This could only exist in a race condition
# where another thread has already deleted this key,
# but we'll guard ourselves against it Justin Case.
pass
def flush_local_cache(self):
"""
Empty store's local in-process cache.
"""
self._local_cache = {}
def maybe_clean_local_cache(self, **kwargs):
# Periodically force an expire on the local cache.
# This cleanup is purely to keep memory low and garbage collect
# old values. It's not required to run to keep things consistent.
# Internally, if an option is fetched and it's expired, it gets
# evicted immediately. This is purely for options that haven't
# been fetched since they've expired.
if not self | return value | conditional_block |
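The (value, expires, grace) triples written by _make_cache_value drive the staleness rules that get_local_cache describes: fresh until expires, usable under force_grace until grace, gone afterwards. The same decision as a standalone Python sketch (illustrative only):

def read_with_grace(entry, now, force_grace=False):
    value, expires, grace = entry    # triple from _make_cache_value
    if now < expires:                # inside the normal expiry window
        return value
    if force_grace and now < grace:  # stale, but accepted in a disaster scenario
        return value
    return None                      # beyond grace: the value is considered bad

With ttl=30 and grace=60, a value cached at t=0 is returned unconditionally until t=30, returned only with force_grace until t=90, and treated as missing after that.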
store.py | AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import logging
import six
from collections import namedtuple
from time import time
from random import random
from django.utils import timezone
from django.utils.functional import cached_property
from sentry.db.models.query import create_or_update
from sentry.utils.hashlib import md5_text
Key = namedtuple('Key', ('name', 'default', 'type', 'flags', 'ttl', 'grace', 'cache_key'))
CACHE_FETCH_ERR = 'Unable to fetch option cache for %s'
CACHE_UPDATE_ERR = 'Unable to update option cache for %s'
logger = logging.getLogger('sentry')
def _make_cache_key(key):
return 'o:%s' % md5_text(key).hexdigest()
def _make_cache_value(key, value):
now = int(time())
return (
value,
now + key.ttl,
now + key.ttl + key.grace,
)
class OptionsStore(object):
"""
Abstraction for the Option storage logic that should be driven
by the OptionsManager.
OptionsStore is gooey and raw. It provides no protection over
what goes into the store. It only knows that it's reading/writing
to the right place. If using the OptionsStore directly, it's your
job to do validation of the data. You should probably go through
OptionsManager instead, unless you need raw access to something.
"""
def __init__(self, cache=None, ttl=None):
self.cache = cache
self.ttl = ttl
self.flush_local_cache()
@cached_property
def model(self):
from sentry.models.option import Option
return Option
def make_key(self, name, default, type, flags, ttl, grace):
return Key(name, default, type, flags, int(ttl), int(grace), _make_cache_key(name))
def get(self, key, silent=False):
"""
Fetches a value from the options store.
"""
result = self.get_cache(key, silent=silent)
if result is not None:
return result
result = self.get_store(key, silent=silent)
if result is not None:
return result
# As a last ditch effort, let's hope we have a key
# in local cache that's possibly stale
return self.get_local_cache(key, force_grace=True)
def get_cache(self, key, silent=False):
"""
First check against our local in-process cache, falling
back to the network cache.
"""
value = self.get_local_cache(key)
if value is not None:
return value
if self.cache is None:
return None
cache_key = key.cache_key
try:
value = self.cache.get(cache_key)
except Exception:
if not silent:
logger.warn(CACHE_FETCH_ERR, key.name, exc_info=True)
value = None
else:
if key.ttl > 0:
self._local_cache[cache_key] = _make_cache_value(key, value)
return value
def get_local_cache(self, key, force_grace=False):
"""
Attempt to fetch a key out of the local cache.
If the key exists, but is beyond expiration, we only
return it if grace=True. This forces the key to be returned
in a disaster scenario as long as we're still holding onto it.
This allows the OptionStore to pave over potential network hiccups
by returning a stale value.
"""
try:
value, expires, grace = self._local_cache[key.cache_key]
except KeyError:
return None
now = int(time())
# Key is within normal expiry window, so just return it
if now < expires:
return value
# If we're able to accept within grace window, return it
if force_grace and now < grace:
return value
# Let's clean up values if we're beyond grace.
if now > grace:
try:
del self._local_cache[key.cache_key]
except KeyError:
# This could only exist in a race condition
# where another thread has already deleted this key,
# but we'll guard ourselves against it Justin Case.
# In this case, it's also possible that another thread
# has updated the value at this key, causing us to evict
# it prematurely. This isn't ideal, but not terrible
# since I don't want to introduce locking to prevent this.
# Even if it did happen, the consequence is just another
# network hop.
pass
# If we're outside the grace window, even if we ask for it
# in grace, too bad. The value is considered bad.
return None
def get_store(self, key, silent=False):
"""
Attempt to fetch value from the database. If successful,
also set it back in the cache.
Returns None in both cases, if the key doesn't actually exist,
or if we errored fetching it.
NOTE: This behavior should probably be improved to differentiate
between a miss vs error, but not worth it now since the value
is limited at the moment.
"""
try:
value = self.model.objects.get(key=key.name).value
except self.model.DoesNotExist:
value = None
except Exception as e:
if not silent:
logger.exception(six.text_type(e))
value = None
else:
# we only attempt to populate the cache if we were previously
# able to successfully talk to the backend
# NOTE: There is definitely a race condition here between updating
# the store and the cache
try:
self.set_cache(key, value)
except Exception:
if not silent:
logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True)
return value
def set(self, key, value):
"""
Store a value in the option store. Value must get persisted to database first,
then attempt caches. If it fails database, the entire operation blows up.
If cache fails, we ignore silently since it'll get repaired later by sync_options.
A boolean is returned to indicate if the network cache was set successfully.
"""
assert self.cache is not None, 'cache must be configured before mutating options'
self.set_store(key, value)
return self.set_cache(key, value)
def set_store(self, key, value):
create_or_update(
model=self.model,
key=key.name,
values={
'value': value,
'last_updated': timezone.now(),
}
)
def set_cache(self, key, value):
if self.cache is None:
return None
cache_key = key.cache_key
if key.ttl > 0:
self._local_cache[cache_key] = _make_cache_value(key, value)
try:
self.cache.set(cache_key, value, self.ttl)
return True
except Exception:
logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True)
return False
def delete(self, key):
"""
Remove key out of option stores. This operation must succeed on the
database first. If database fails, an exception is raised.
If database succeeds, caches are then allowed to fail silently.
A boolean is returned to indicate if the network deletion succeeds.
"""
assert self.cache is not None, 'cache must be configured before mutating options'
self.delete_store(key)
return self.delete_cache(key)
def delete_store(self, key):
self.model.objects.filter(key=key.name).delete()
def delete_cache(self, key):
cache_key = key.cache_key
try:
del self._local_cache[cache_key]
except KeyError:
pass
try:
self.cache.delete(cache_key)
return True
except Exception:
logger.warn(CACHE_UPDATE_ERR, key.name, exc_info=True)
return False
def clean_local_cache(self):
"""
Iterate over our local cache items, and
remove the keys that are beyond their grace time.
"""
to_expire = []
now = int(time())
try:
for k, (_, _, grace) in six.iteritems(self._local_cache):
if now > grace:
to_expire.append(k)
except RuntimeError:
# It's possible for the dictionary to be mutated in another thread
# while iterating, but this case is rare, so instead of making a
# copy and iterating that, it's more efficient to just let it fail
# gracefully. It'll just get re-run later.
return
for k in to_expire:
try:
del self._local_cache[k]
except KeyError:
# This could only exist in a race condition
# where another thread has already deleted this key,
# but we'll guard ourselves against it Justin Case.
pass
def flush_local_cache(self):
"""
Empty store's local in-process cache.
"""
self._local_cache = {}
def maybe_clean_local_cache(self, **kwargs):
# Periodically force an expire on the local cache.
# This cleanup is purely to keep memory low and garbage collect
# old values. It's not required to run to keep things consistent.
# Internally, if an option is fetched and it's expired, it gets
# evicted immediately. This is purely for options that haven't
# been fetched since they've expired.
if not self | for k, (_, _, grace) in six.iteritems(self._local_cache):
if now > grace: | random_line_split |
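clean_local_cache above deliberately lets a concurrent mutation abort the scan (the RuntimeError branch) and shields each delete against a racing thread. A compact Python sketch of the same tolerance, using dict.pop with a default in place of the try/del/except KeyError guard (a sketch, not the module's code):

def clean(local_cache, now):
    to_expire = []
    try:
        for key, (_, _, grace) in local_cache.items():
            if now > grace:
                to_expire.append(key)
    except RuntimeError:
        return  # another thread resized the dict mid-iteration; retry on the next run
    for key in to_expire:
        local_cache.pop(key, None)  # tolerates a concurrent delete of the same key

(Python 3 .items() here, where the original uses six.iteritems for Python 2 compatibility.)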
iterable.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(generic_associated_types)]
//~^ WARNING the feature `generic_associated_types` is incomplete
use std::ops::Deref;
// FIXME(#44265): "lifetime parameters are not allowed on this type" errors will be addressed in a
// follow-up PR.
trait Iterable {
type Item<'a>;
type Iter<'a>: Iterator<Item = Self::Item<'a>>;
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
fn iter<'a>(&'a self) -> Self::Iter<'a>;
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
}
// Impl for struct type
impl<T> Iterable for Vec<T> {
type Item<'a> = &'a T;
type Iter<'a> = std::slice::Iter<'a, T>;
fn iter<'a>(&'a self) -> Self::Iter<'a> {
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
self.iter()
}
}
// Impl for a primitive type
impl<T> Iterable for [T] {
type Item<'a> = &'a T;
type Iter<'a> = std::slice::Iter<'a, T>;
fn iter<'a>(&'a self) -> Self::Iter<'a> {
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
self.iter()
}
}
fn make_iter<'a, I: Iterable>(it: &'a I) -> I::Iter<'a> {
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
it.iter()
}
fn get_first<'a, I: Iterable>(it: &'a I) -> Option<I::Item<'a>> {
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
it.iter().next()
}
fn main() {}
| {
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
it.iter().next()
} | identifier_body |
iterable.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(generic_associated_types)]
//~^ WARNING the feature `generic_associated_types` is incomplete
use std::ops::Deref;
// FIXME(#44265): "lifetime parameters are not allowed on this type" errors will be addressed in a
// follow-up PR.
trait Iterable {
type Item<'a>;
type Iter<'a>: Iterator<Item = Self::Item<'a>>;
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
fn iter<'a>(&'a self) -> Self::Iter<'a>;
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
}
// Impl for struct type
impl<T> Iterable for Vec<T> {
type Item<'a> = &'a T;
type Iter<'a> = std::slice::Iter<'a, T>;
fn iter<'a>(&'a self) -> Self::Iter<'a> {
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
self.iter()
}
}
// Impl for a primitive type
impl<T> Iterable for [T] {
type Item<'a> = &'a T;
type Iter<'a> = std::slice::Iter<'a, T>;
fn iter<'a>(&'a self) -> Self::Iter<'a> {
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
self.iter()
}
}
fn make_iter<'a, I: Iterable>(it: &'a I) -> I::Iter<'a> {
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
it.iter()
}
fn get_first<'a, I: Iterable>(it: &'a I) -> Option<I::Item<'a>> {
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
it.iter().next()
}
fn main() {} | // follow-up PR.
trait Iterable {
type Item<'a>;
type Iter<'a>: Iterator<Item = Self::Item<'a>>; | random_line_split |
iterable.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(generic_associated_types)]
//~^ WARNING the feature `generic_associated_types` is incomplete
use std::ops::Deref;
// FIXME(#44265): "lifetime parameters are not allowed on this type" errors will be addressed in a
// follow-up PR.
trait Iterable {
type Item<'a>;
type Iter<'a>: Iterator<Item = Self::Item<'a>>;
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
fn iter<'a>(&'a self) -> Self::Iter<'a>;
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
}
// Impl for struct type
impl<T> Iterable for Vec<T> {
type Item<'a> = &'a T;
type Iter<'a> = std::slice::Iter<'a, T>;
fn iter<'a>(&'a self) -> Self::Iter<'a> {
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
self.iter()
}
}
// Impl for a primitive type
impl<T> Iterable for [T] {
type Item<'a> = &'a T;
type Iter<'a> = std::slice::Iter<'a, T>;
fn iter<'a>(&'a self) -> Self::Iter<'a> {
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
self.iter()
}
}
fn make_iter<'a, I: Iterable>(it: &'a I) -> I::Iter<'a> {
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
it.iter()
}
fn get_first<'a, I: Iterable>(it: &'a I) -> Option<I::Item<'a>> {
//~^ ERROR lifetime parameters are not allowed on this type [E0110]
it.iter().next()
}
fn main() {}
| get_first | identifier_name |
xor-joiner.rs | // Exercise 2.3
use std::os;
use std::io::File;
fn xor(a: &[u8], b: &[u8]) -> ~[u8] {
let mut ret = ~[];
for i in range(0, a.len()) {
ret.push(a[i] ^ b[i]);
}
ret
}
fn main() {
let args: ~[~str] = os::args();
if args.len() != 3 {
println!("Usage: {:s} <inputfile1> <inputfile2>", args[0]);
} else {
let fname1 = &args[1];
let fname2 = &args[2];
let path1 = Path::new(fname1.clone());
let path2 = Path::new(fname2.clone());
let share_file1 = File::open(&path1);
let share_file2 = File::open(&path2);
match (share_file1, share_file2) {
(Some(mut share1), Some(mut share2)) => {
let share1bytes: ~[u8] = share1.read_to_end();
let share2bytes: ~[u8] = share2.read_to_end();
print!("{:s}", std::str::from_utf8_owned(
xor(share1bytes, share2bytes)));
} ,
(_, _) => fail!("Error opening input files!")
}
}
} | ret.push(a[i] ^ b[i]);
}
ret | random_line_split |
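The xor helper above is the joining half of a two-share XOR secret split: share1 XOR share2 recovers the original bytes. A Python sketch of the full round trip, with hypothetical helper names (note the Rust version above indexes by a.len() and so assumes equal-length shares):

import os

def split(secret):
    share1 = os.urandom(len(secret))                       # one-time random mask
    share2 = bytes(a ^ b for a, b in zip(secret, share1))
    return share1, share2

def join(share1, share2):
    return bytes(a ^ b for a, b in zip(share1, share2))

s1, s2 = split(b'attack at dawn')
assert join(s1, s2) == b'attack at dawn'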
xor-joiner.rs | // Exercise 2.3
use std::os;
use std::io::File;
fn xor(a: &[u8], b: &[u8]) -> ~[u8] {
let mut ret = ~[];
for i in range(0, a.len()) {
ret.push(a[i] ^ b[i]);
}
ret
}
fn main() {
let args: ~[~str] = os::args();
if args.len() != 3 {
println!("Usage: {:s} <inputfile1> <inputfile2>", args[0]);
} else {
let fname1 = &args[1];
let fname2 = &args[2];
let path1 = Path::new(fname1.clone());
let path2 = Path::new(fname2.clone());
let share_file1 = File::open(&path1);
let share_file2 = File::open(&path2);
match (share_file1, share_file2) {
(Some(mut share1), Some(mut share2)) => {
let share1bytes: ~[u8] = share1.read_to_end();
let share2bytes: ~[u8] = share2.read_to_end();
print!("{:s}", std::str::from_utf8_owned(
xor(share1bytes, share2bytes)));
} ,
(_, _) => fail!("Error opening input files!")
}
}
}
| {
let share1bytes: ~[u8] = share1.read_to_end();
let share2bytes: ~[u8] = share2.read_to_end();
print!("{:s}", std::str::from_utf8_owned(
xor(share1bytes, share2bytes)));
} | conditional_block |
xor-joiner.rs | // Exercise 2.3
use std::os;
use std::io::File;
fn xor(a: &[u8], b: &[u8]) -> ~[u8] {
let mut ret = ~[];
for i in range(0, a.len()) {
ret.push(a[i] ^ b[i]);
}
ret
}
fn main() {
let args: ~[~str] = os::args();
if args.len() != 3 {
println!("Usage: {:s} <inputfile1> <inputfile2>", args[0]);
} else {
let fname1 = &args[1];
let fname2 = &args[2];
let path1 = Path::new(fname1.clone());
let path2 = Path::new(fname2.clone());
let share_file1 = File::open(&path1);
let share_file2 = File::open(&path2);
match (share_file1, share_file2) {
(Some(mut share1), Some(mut share2)) => {
let share1bytes: ~[u8] = share1.read_to_end();
let share2bytes: ~[u8] = share2.read_to_end();
print!("{:s}", std::str::from_utf8_owned(
xor(share1bytes, share2bytes)));
} ,
(_, _) => fail!("Error opening input files!")
}
}
}
| main | identifier_name |
xor-joiner.rs | // Exercise 2.3
use std::os;
use std::io::File;
fn xor(a: &[u8], b: &[u8]) -> ~[u8] {
let mut ret = ~[];
for i in range(0, a.len()) {
ret.push(a[i] ^ b[i]);
}
ret
}
fn main() | }
}
}
| {
let args: ~[~str] = os::args();
if args.len() != 3 {
println!("Usage: {:s} <inputfile1> <inputfile2>", args[0]);
} else {
let fname1 = &args[1];
let fname2 = &args[2];
let path1 = Path::new(fname1.clone());
let path2 = Path::new(fname2.clone());
let share_file1 = File::open(&path1);
let share_file2 = File::open(&path2);
match (share_file1, share_file2) {
(Some(mut share1), Some(mut share2)) => {
let share1bytes: ~[u8] = share1.read_to_end();
let share2bytes: ~[u8] = share2.read_to_end();
print!("{:s}", std::str::from_utf8_owned(
xor(share1bytes, share2bytes)));
} ,
(_, _) => fail!("Error opening input files!") | identifier_body |
window.py | import types
from sikwidgets.region_group import RegionGroup
from sikwidgets.util import to_snakecase
from sikwidgets.widgets import *
def gen_widget_method(widget_class):
def widget(self, *args, **kwargs):
return self.create_widget(widget_class, *args, **kwargs)
return widget
class Window(RegionGroup):
def __init__(self, region, parent=None):
# FIXME: this is hacky
RegionGroup.__init__(self, parent)
# manually set the region to the given one rather
# than the region from the parent
self.search_region = region
self.region = region
self.widgets = []
self.windows = []
self.add_widget_methods()
self.contains()
# FIXME: str() shouldn't return a URI.. use image_folder() method for this
def __str__(self):
uri = to_snakecase(self.__class__.__name__)
if self.parent:
uri = os.path.join(str(self.parent), uri)
return uri
def create_image_folders(self):
for widget in self.widgets:
widget.create_image_folder()
for window in self.windows:
window.create_image_folders()
def capture_screenshots(self):
for widget in self.widgets:
widget.capture_screenshots()
for window in self.windows:
window.capture_screenshots()
def contains(self):
pass
# TODO: use some basic statistics to decide
# if we see the window or not
def exists(self):
#pop_size = len(self.widgets)
#n = sample_size(pop_size)
#random.sample(self.widgets, n)
seen_widgets = 0
unseen_widgets = 0
for widget in self.widgets:
if seen_widgets >= 10:
# we're confident enough it exists
return True
if widget.exists():
seen_widgets += 1
else:
unseen_widgets += 1
if seen_widgets > 2 * unseen_widgets + 1:
return True
if seen_widgets >= unseen_widgets:
return True
return False
def create_widget(self, widget_class, *args, **kwargs):
widget = widget_class(self, *args, **kwargs)
self.widgets.append(widget)
return widget
def add_widget_methods(self):
for class_name in instantiable_widget_class_names:
widget_class = eval(class_name)
method = types.MethodType(gen_widget_method(widget_class), self, self.__class__)
# take the class, get its name in string form, and convert to snake case
method_name = to_snakecase(widget_class.__name__)
setattr(self, method_name, method)
def menu(self, menu_class, *args, **kwargs):
return self.create_widget(menu_class, *args, **kwargs)
def page(self, page_class, *args, **kwargs):
return self.create_widget(page_class, *args, **kwargs)
def window(self, window_class):
# since the region for a child window may actually be larger than
# the region for this window, we should default to passing the
# entire screen
window = window_class(self.region.getScreen(), self)
self.windows.append(window)
return window
| exists | identifier_name |
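exists() above is a voting heuristic over widget probes: stop as soon as ten widgets have been sighted or sightings hold a strong majority, and otherwise fall back to comparing counts. Extracted as a pure function it is easier to unit test; this sketch assumes the count comparison acts as the final tie-break:

def window_probably_exists(probe_hits):
    seen = unseen = 0
    for hit in probe_hits:           # one boolean per widget.exists() probe
        if seen >= 10:               # confident enough, stop probing
            return True
        if hit:
            seen += 1
        else:
            unseen += 1
        if seen > 2 * unseen + 1:    # strong majority of sightings
            return True
    return seen >= unseen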
window.py | import types
from sikwidgets.region_group import RegionGroup
from sikwidgets.util import to_snakecase
from sikwidgets.widgets import *
def gen_widget_method(widget_class):
def widget(self, *args, **kwargs):
return self.create_widget(widget_class, *args, **kwargs)
return widget
class Window(RegionGroup):
def __init__(self, region, parent=None):
# FIXME: this is hacky
RegionGroup.__init__(self, parent)
# manually set the region to the given one rather
# than the region from the parent
self.search_region = region
self.region = region
self.widgets = []
self.windows = []
self.add_widget_methods()
self.contains()
# FIXME: str() shouldn't return a URI.. use image_folder() method for this
def __str__(self):
uri = to_snakecase(self.__class__.__name__)
if self.parent:
uri = os.path.join(str(self.parent), uri)
return uri
def create_image_folders(self):
for widget in self.widgets:
widget.create_image_folder()
for window in self.windows:
window.create_image_folders()
def capture_screenshots(self):
for widget in self.widgets:
widget.capture_screenshots()
for window in self.windows:
window.capture_screenshots()
def contains(self):
pass
# TODO: use some basic statistics to decide
# if we see the window or not
def exists(self):
#pop_size = len(self.widgets)
#n = sample_size(pop_size)
#random.sample(self.widgets, n)
seen_widgets = 0
unseen_widgets = 0
for widget in self.widgets:
if seen_widgets >= 10:
# we're confident enough it exists
return True
if widget.exists():
seen_widgets += 1
else:
unseen_widgets += 1
if seen_widgets > 2 * unseen_widgets + 1:
return True
if seen_widgets >= unseen_widgets:
return True
return False
def create_widget(self, widget_class, *args, **kwargs):
widget = widget_class(self, *args, **kwargs)
self.widgets.append(widget)
return widget
def add_widget_methods(self):
for class_name in instantiable_widget_class_names:
widget_class = eval(class_name)
method = types.MethodType(gen_widget_method(widget_class), self, self.__class__)
# take the class, get its name in string form, and convert to snake case
method_name = to_snakecase(widget_class.__name__)
setattr(self, method_name, method)
def menu(self, menu_class, *args, **kwargs):
return self.create_widget(menu_class, *args, **kwargs)
def page(self, page_class, *args, **kwargs):
return self.create_widget(page_class, *args, **kwargs)
def window(self, window_class):
# since the region for a child window may actually be larger than
# the region for this window, we should default to passing the
# entire screen
window = window_class(self.region.getScreen(), self)
self.windows.append(window)
return window
| def widget(self, *args, **kwargs):
return self.create_widget(widget_class, *args, **kwargs)
return widget | identifier_body |
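gen_widget_method plus types.MethodType is a closure-factory pattern: one function is generated per widget class and bound to the instance at runtime, which is how add_widget_methods grows a method named after each widget class without writing them by hand. A self-contained sketch of the mechanism, using the two-argument Python 3 form of MethodType (the code above uses the older three-argument form):

import types

def make_creator(widget_label):
    def creator(self):
        return '%s created on %s' % (widget_label, self.name)
    return creator

class Panel:
    def __init__(self, name):
        self.name = name

panel = Panel('settings')
# Bind a generated function to one instance, as add_widget_methods does.
panel.button = types.MethodType(make_creator('Button'), panel)
print(panel.button())   # -> Button created on settings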
window.py | import types
from sikwidgets.region_group import RegionGroup
from sikwidgets.util import to_snakecase
from sikwidgets.widgets import *
def gen_widget_method(widget_class):
def widget(self, *args, **kwargs):
return self.create_widget(widget_class, *args, **kwargs)
return widget
class Window(RegionGroup):
def __init__(self, region, parent=None):
# FIXME: this is hacky
RegionGroup.__init__(self, parent)
# manually set the region to the given one rather
# than the region from the parent
self.search_region = region
self.region = region
self.widgets = []
self.windows = []
self.add_widget_methods()
self.contains()
# FIXME: str() shouldn't return a URI.. use image_folder() method for this
def __str__(self):
uri = to_snakecase(self.__class__.__name__)
if self.parent:
uri = os.path.join(str(self.parent), uri)
return uri
def create_image_folders(self):
for widget in self.widgets:
widget.create_image_folder()
for window in self.windows:
window.create_image_folders()
def capture_screenshots(self):
for widget in self.widgets:
widget.capture_screenshots()
for window in self.windows:
window.capture_screenshots()
def contains(self):
pass
# TODO: use some basic statistics to decide
# if we see the window or not
def exists(self):
#pop_size = len(self.widgets)
#n = sample_size(pop_size)
#random.sample(self.widgets, n)
seen_widgets = 0
unseen_widgets = 0
for widget in self.widgets:
if seen_widgets >= 10:
# we're confident enough it exists
return True
if widget.exists():
seen_widgets += 1
else:
unseen_widgets += 1
if seen_widgets > 2 * unseen_widgets + 1:
return True
if seen_widgets >= unseen_widgets:
return True
return False
def create_widget(self, widget_class, *args, **kwargs):
widget = widget_class(self, *args, **kwargs)
self.widgets.append(widget)
return widget
def add_widget_methods(self):
for class_name in instantiable_widget_class_names:
widget_class = eval(class_name)
method = types.MethodType(gen_widget_method(widget_class), self, self.__class__)
# take the class, get its name in string form, and convert to snake case
method_name = to_snakecase(widget_class.__name__)
setattr(self, method_name, method)
def menu(self, menu_class, *args, **kwargs):
return self.create_widget(menu_class, *args, **kwargs)
def page(self, page_class, *args, **kwargs):
return self.create_widget(page_class, *args, **kwargs)
def window(self, window_class):
# since the region for a child window may actually be larger than
# the region for this window, we should default to passing the
# entire screen
window = window_class(self.region.getScreen(), self)
self.windows.append(window)
return window | random_line_split |
|
window.py | import types
from sikwidgets.region_group import RegionGroup
from sikwidgets.util import to_snakecase
from sikwidgets.widgets import *
def gen_widget_method(widget_class):
def widget(self, *args, **kwargs):
return self.create_widget(widget_class, *args, **kwargs)
return widget
class Window(RegionGroup):
def __init__(self, region, parent=None):
# FIXME: this is hacky
RegionGroup.__init__(self, parent)
# manually set the region to the given one rather
# than the region from the parent
self.search_region = region
self.region = region
self.widgets = []
self.windows = []
self.add_widget_methods()
self.contains()
# FIXME: str() shouldn't return a URI.. use image_folder() method for this
def __str__(self):
uri = to_snakecase(self.__class__.__name__)
if self.parent:
uri = os.path.join(str(self.parent), uri)
return uri
def create_image_folders(self):
for widget in self.widgets:
widget.create_image_folder()
for window in self.windows:
window.create_image_folders()
def capture_screenshots(self):
for widget in self.widgets:
widget.capture_screenshots()
for window in self.windows:
window.capture_screenshots()
def contains(self):
pass
# TODO: use some basic statistics to decide
# if we see the window or not
def exists(self):
#pop_size = len(self.widgets)
#n = sample_size(pop_size)
#random.sample(self.widgets, n)
seen_widgets = 0
unseen_widgets = 0
for widget in self.widgets:
if seen_widgets >= 10:
# we're confident enough it exists
return True
if widget.exists():
seen_widgets += 1
else:
unseen_widgets += 1
if seen_widgets > 2 * unseen_widgets + 1:
return True
if seen_widgets >= unseen_widgets:
return True
return False
def create_widget(self, widget_class, *args, **kwargs):
widget = widget_class(self, *args, **kwargs)
self.widgets.append(widget)
return widget
def add_widget_methods(self):
for class_name in instantiable_widget_class_names:
widget_class = eval(class_name)
method = types.MethodType(gen_widget_method(widget_class), self, self.__class__)
# take the class, get its name in string form, and convert to snake case
method_name = to_snakecase(widget_class.__name__)
setattr(self, method_name, method)
def menu(self, menu_class, *args, **kwargs):
return self.create_widget(menu_class, *args, **kwargs)
def page(self, page_class, *args, **kwargs):
return self.create_widget(page_class, *args, **kwargs)
def window(self, window_class):
# since the region for a child window may actually be larger than
# the region for this window, we should default to passing the
# entire screen
window = window_class(self.region.getScreen(), self)
self.windows.append(window)
return window
| return True | conditional_block |
ChildWatch.spec.ts | import {assert} from 'chai';
import {Job} from '../Job';
import {BlockIO} from '../BlockProperty';
describe('Block Child Watch', function () {
it('basic', function () {
let job = new Job();
let watchLog: any[] = [];
let watch = {
onChildChange(property: BlockIO, saved: boolean) {
watchLog.push([property._name, property._value != null, Boolean(saved)]);
},
};
job.watch(watch);
job.createBlock('a');
assert.deepEqual(watchLog, [['a', true, true]], 'new block');
watchLog = [];
job.createOutputBlock('a');
assert.deepEqual(watchLog, [['a', true, false]], 'replace with temp block');
watchLog = [];
job.createBlock('a');
assert.deepEqual(watchLog, [['a', true, true]], 'replace with normal block');
watchLog = [];
job.setValue('a', null);
assert.deepEqual(watchLog, [['a', false, true]], 'remove block');
watchLog = [];
job.createOutputBlock('a');
assert.deepEqual(watchLog, [['a', true, false]], 'new temp block');
watchLog = [];
job.setBinding('a', 'b');
assert.deepEqual(watchLog, [['a', false, false]], 'remove block with binding');
watchLog = [];
});
});
| onChildChange | identifier_name |
ChildWatch.spec.ts | import {assert} from 'chai';
import {Job} from '../Job';
import {BlockIO} from '../BlockProperty';
| describe('Block Child Watch', function () {
it('basic', function () {
let job = new Job();
let watchLog: any[] = [];
let watch = {
onChildChange(property: BlockIO, saved: boolean) {
watchLog.push([property._name, property._value != null, Boolean(saved)]);
},
};
job.watch(watch);
job.createBlock('a');
assert.deepEqual(watchLog, [['a', true, true]], 'new block');
watchLog = [];
job.createOutputBlock('a');
assert.deepEqual(watchLog, [['a', true, false]], 'replace with temp block');
watchLog = [];
job.createBlock('a');
assert.deepEqual(watchLog, [['a', true, true]], 'replace with normal block');
watchLog = [];
job.setValue('a', null);
assert.deepEqual(watchLog, [['a', false, true]], 'remove block');
watchLog = [];
job.createOutputBlock('a');
assert.deepEqual(watchLog, [['a', true, false]], 'new temp block');
watchLog = [];
job.setBinding('a', 'b');
assert.deepEqual(watchLog, [['a', false, false]], 'remove block with binding');
watchLog = [];
});
}); | random_line_split |
|
ChildWatch.spec.ts | import {assert} from 'chai';
import {Job} from '../Job';
import {BlockIO} from '../BlockProperty';
describe('Block Child Watch', function () {
it('basic', function () {
let job = new Job();
let watchLog: any[] = [];
let watch = {
onChildChange(property: BlockIO, saved: boolean) {
watchLog.push([property._name, property._value != null, Boolean(saved)]);
},
};
job.watch(watch);
job.createBlock('a');
assert.deepEqual(watchLog, [['a', true, true]], 'new block');
watchLog = [];
job.createOutputBlock('a');
assert.deepEqual(watchLog, [['a', true, false]], 'replace with temp block');
watchLog = [];
job.createBlock('a');
assert.deepEqual(watchLog, [['a', true, true]], 'replace with normal block');
watchLog = [];
job.setValue('a', null);
assert.deepEqual(watchLog, [['a', false, true]], 'remove block');
watchLog = [];
job.createOutputBlock('a');
assert.deepEqual(watchLog, [['a', true, false]], 'new temp block');
watchLog = [];
job.setBinding('a', 'b');
assert.deepEqual(watchLog, [['a', false, false]], 'remove block with binding');
watchLog = [];
});
});
| {
watchLog.push([property._name, property._value != null, Boolean(saved)]);
} | identifier_body |
24_touch_switch.py | #!/usr/bin/env python3
import RPi.GPIO as GPIO
TouchPin = 11
Gpin = 13
Rpin = 12
tmp = 0
def setup():
GPIO.setmode(GPIO.BOARD) # Numbers GPIOs by physical location
GPIO.setup(Gpin, GPIO.OUT) # Set Green Led Pin mode to output
GPIO.setup(Rpin, GPIO.OUT) # Set Red Led Pin mode to output
GPIO.setup(TouchPin, GPIO.IN, pull_up_down=GPIO.PUD_UP) # Set BtnPin's mode is input, and pull up to high level(3.3V)
def Led(x):
if x == 0:
GPIO.output(Rpin, 1)
GPIO.output(Gpin, 0)
if x == 1:
GPIO.output(Rpin, 0)
GPIO.output(Gpin, 1)
def Print(x):
global tmp
if x != tmp:
if x == 0:
print (' **********')
print (' * ON *')
print (' **********')
if x == 1:
print (' **********')
print (' * OFF *')
print (' **********')
tmp = x
def loop():
while True:
Led(GPIO.input(TouchPin))
Print(GPIO.input(TouchPin))
def destroy():
GPIO.output(Gpin, GPIO.HIGH) # Green led off
GPIO.output(Rpin, GPIO.HIGH) # Red led off
GPIO.cleanup() # Release resource
if __name__ == '__main__': # Program start from here
setup()
try:
loop()
except KeyboardInterrupt: # When 'Ctrl+C' is pressed, the child program destroy() will be executed.
destroy()
| GPIO.output(Gpin, GPIO.HIGH) # Green led off
GPIO.output(Rpin, GPIO.HIGH) # Red led off
GPIO.cleanup() # Release resource | identifier_body |
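loop() above busy-polls the touch pin; RPi.GPIO can instead deliver edge events to a callback, which keeps the CPU idle between touches. An untested sketch of an interrupt-driven variant that reuses the helpers above and assumes setup() has already run (the 20 ms bouncetime is a guess for this sensor):

import time

def on_edge(channel):
    state = GPIO.input(channel)
    Led(state)     # reuse the LED helper defined above
    Print(state)   # reuse the console helper defined above

GPIO.add_event_detect(TouchPin, GPIO.BOTH, callback=on_edge, bouncetime=20)
try:
    while True:
        time.sleep(1)   # idle; the callback does the work
except KeyboardInterrupt:
    destroy()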
24_touch_switch.py | #!/usr/bin/env python3
import RPi.GPIO as GPIO
TouchPin = 11
Gpin = 13
Rpin = 12
tmp = 0
def setup():
GPIO.setmode(GPIO.BOARD) # Numbers GPIOs by physical location
GPIO.setup(Gpin, GPIO.OUT) # Set Green Led Pin mode to output
GPIO.setup(Rpin, GPIO.OUT) # Set Red Led Pin mode to output
GPIO.setup(TouchPin, GPIO.IN, pull_up_down=GPIO.PUD_UP) # Set BtnPin's mode is input, and pull up to high level(3.3V)
def Led(x):
if x == 0:
GPIO.output(Rpin, 1)
GPIO.output(Gpin, 0)
if x == 1:
GPIO.output(Rpin, 0)
GPIO.output(Gpin, 1)
def Print(x):
global tmp
if x != tmp:
if x == 0:
print (' **********')
print (' * ON *')
print (' **********')
if x == 1:
print (' **********')
print (' * OFF *')
print (' **********')
tmp = x
def loop():
while True:
Led(GPIO.input(TouchPin))
Print(GPIO.input(TouchPin))
def destroy():
GPIO.output(Gpin, GPIO.HIGH) # Green led off
GPIO.output(Rpin, GPIO.HIGH) # Red led off
GPIO.cleanup() # Release resource
if __name__ == '__main__': # Program start from here
setup()
try:
loop()
except KeyboardInterrupt: # When 'Ctrl+C' is pressed, the child program destroy() will be executed.
destroy()
| print (' **********')
print (' * ON *')
print (' **********') | conditional_block |
24_touch_switch.py | #!/usr/bin/env python3
import RPi.GPIO as GPIO
TouchPin = 11
Gpin = 13
Rpin = 12
tmp = 0
def setup():
GPIO.setmode(GPIO.BOARD) # Numbers GPIOs by physical location
GPIO.setup(Gpin, GPIO.OUT) # Set Green Led Pin mode to output
GPIO.setup(Rpin, GPIO.OUT) # Set Red Led Pin mode to output
GPIO.setup(TouchPin, GPIO.IN, pull_up_down=GPIO.PUD_UP) # Set BtnPin's mode is input, and pull up to high level(3.3V)
def Led(x):
if x == 0:
GPIO.output(Rpin, 1)
GPIO.output(Gpin, 0)
if x == 1:
GPIO.output(Rpin, 0)
GPIO.output(Gpin, 1)
def Print(x):
global tmp
if x != tmp:
if x == 0:
print (' **********')
print (' * ON *')
print (' **********')
if x == 1:
print (' **********')
print (' * OFF *')
print (' **********')
tmp = x
def loop():
while True:
Led(GPIO.input(TouchPin))
Print(GPIO.input(TouchPin))
def destroy():
GPIO.output(Gpin, GPIO.HIGH) # Green led off
GPIO.output(Rpin, GPIO.HIGH) # Red led off
GPIO.cleanup() # Release resource
if __name__ == '__main__': # Program start from here
setup()
try:
loop()
except KeyboardInterrupt: # When 'Ctrl+C' is pressed, the child program destroy() will be executed.
destroy()
| Print | identifier_name |
24_touch_switch.py | #!/usr/bin/env python3 |
TouchPin = 11
Gpin = 13
Rpin = 12
tmp = 0
def setup():
GPIO.setmode(GPIO.BOARD) # Numbers GPIOs by physical location
GPIO.setup(Gpin, GPIO.OUT) # Set Green Led Pin mode to output
GPIO.setup(Rpin, GPIO.OUT) # Set Red Led Pin mode to output
GPIO.setup(TouchPin, GPIO.IN, pull_up_down=GPIO.PUD_UP) # Set BtnPin's mode is input, and pull up to high level(3.3V)
def Led(x):
if x == 0:
GPIO.output(Rpin, 1)
GPIO.output(Gpin, 0)
if x == 1:
GPIO.output(Rpin, 0)
GPIO.output(Gpin, 1)
def Print(x):
global tmp
if x != tmp:
if x == 0:
print (' **********')
print (' * ON *')
print (' **********')
if x == 1:
print (' **********')
print (' * OFF *')
print (' **********')
tmp = x
def loop():
while True:
Led(GPIO.input(TouchPin))
Print(GPIO.input(TouchPin))
def destroy():
GPIO.output(Gpin, GPIO.HIGH) # Green led off
GPIO.output(Rpin, GPIO.HIGH) # Red led off
GPIO.cleanup() # Release resource
if __name__ == '__main__': # Program start from here
setup()
try:
loop()
except KeyboardInterrupt: # When 'Ctrl+C' is pressed, the child program destroy() will be executed.
destroy() | import RPi.GPIO as GPIO | random_line_split |
Date.js | /*!
* ${copyright}
*/
// Provides the base implementation for all model implementations
sap.ui.define(['jquery.sap.global', 'sap/ui/core/format/DateFormat', 'sap/ui/model/SimpleType', 'sap/ui/model/FormatException', 'sap/ui/model/ParseException', 'sap/ui/model/ValidateException'],
function(jQuery, DateFormat, SimpleType, FormatException, ParseException, ValidateException) {
"use strict";
/**
* Constructor for a Date type.
*
* @class
* This class represents date simple types.
*
* @extends sap.ui.model.SimpleType
*
* @author SAP SE
* @version ${version}
*
* @public
* @param {object} [oFormatOptions] Formatting options. For a list of all available options, see {@link sap.ui.core.format.DateFormat.getDateInstance DateFormat}.
* @param {object} [oFormatOptions.source] Additional set of options used to create a second <code>DateFormat</code> object for conversions between
* string values in the data source (e.g. model) and <code>Date</code>. This second format object is used to convert from a model <code>string</code> to <code>Date</code> before
* converting the <code>Date</code> to <code>string</code> with the primary format object. Vice versa, this 'source' format is also used to format an already parsed
* external value (e.g. user input) into the string format that is expected by the data source.
* For a list of all available options, see {@link sap.ui.core.format.DateFormat.getDateInstance DateFormat}.
* In case an empty object is given, the default is the ISO date notation (yyyy-MM-dd).
* @param {object} [oConstraints] Value constraints
* @param {Date|string} [oConstraints.minimum] Smallest value allowed for this type. Values for constraints must use the same type as configured via <code>oFormatOptions.source</code>.
* @param {Date|string} [oConstraints.maximum] Largest value allowed for this type. Values for constraints must use the same type as configured via <code>oFormatOptions.source</code>.
* @alias sap.ui.model.type.Date
*/
var Date1 = SimpleType.extend("sap.ui.model.type.Date", /** @lends sap.ui.model.type.Date.prototype */ {
constructor : function () {
SimpleType.apply(this, arguments);
this.sName = "Date";
}
});
Date1.prototype.formatValue = function(oValue, sInternalType) {
var oFormat;
switch (this.getPrimitiveType(sInternalType)) {
case "string":
case "any":
if (oValue == null) {
return "";
}
if (this.oFormatOptions.source && this.oFormatOptions.source.pattern !== "timestamp" && oValue === "") {
return "";
}
oFormat = this.getModelFormat();
oValue = oFormat.parse(oValue);
return this.oOutputFormat.format(oValue);
default:
throw new FormatException("Don't know how to format Date to " + sInternalType);
}
};
Date1.prototype.parseValue = function(oValue, sInternalType) {
var oResult, oBundle;
switch (this.getPrimitiveType(sInternalType)) {
case "string":
if (oValue === "") {
return null;
}
var oResult = this.oOutputFormat.parse(oValue);
if (!oResult) {
oBundle = sap.ui.getCore().getLibraryResourceBundle();
throw new ParseException(oBundle.getText(this.sName + ".Invalid"));
}
if (this.oInputFormat) {
if (this.oFormatOptions.source.pattern == "timestamp") {
oResult = oResult.getTime();
} else {
oResult = this.oInputFormat.format(oResult);
}
}
return oResult;
default:
throw new ParseException("Don't know how to parse Date from " + sInternalType);
}
};
Date1.prototype.validateValue = function(oValue) {
if (this.oConstraints) {
var oBundle = sap.ui.getCore().getLibraryResourceBundle(),
aViolatedConstraints = [],
aMessages = [],
oInputFormat = this.oInputFormat,
that = this;
// convert date into date object to compare
if (oInputFormat && this.oFormatOptions.source.pattern != "timestamp") {
oValue = oInputFormat.parse(oValue);
}
jQuery.each(this.oConstraints, function(sName, oContent) {
if (oInputFormat) {
oContent = oInputFormat.parse(oContent);
}
switch (sName) {
case "minimum":
if (oValue < oContent) {
aViolatedConstraints.push("minimum");
aMessages.push(oBundle.getText(that.sName + ".Minimum", [oContent]));
}
break;
case "maximum":
if (oValue > oContent) {
aViolatedConstraints.push("maximum");
aMessages.push(oBundle.getText(that.sName + ".Maximum", [oContent]));
}
}
});
if (aViolatedConstraints.length > 0) {
throw new ValidateException(aMessages.join(" "), aViolatedConstraints);
}
}
};
var oTimestampInputFormat = {
format: function(oValue) {
if (oValue instanceof Date) {
return oValue.getTime();
}
return null;
},
parse: function(oValue) {
if (typeof (oValue) != "number") {
if (isNaN(oValue)) {
throw new FormatException("Cannot format date: " + oValue + " is not a valid Timestamp");
} else {
oValue = parseInt(oValue, 10);
}
}
oValue = new Date(oValue);
return oValue;
}
};
Date1.prototype.getModelFormat = function() {
if (this.oInputFormat) {
if (this.oFormatOptions.source.pattern == "timestamp") {
return oTimestampInputFormat;
} else {
return this.oInputFormat;
}
} else {
return SimpleType.prototype.getModelFormat.call(this);
}
};
Date1.prototype.setFormatOptions = function(oFormatOptions) {
this.oFormatOptions = oFormatOptions;
this._createFormats();
};
/**
* @protected
*/
Date1.prototype.getOutputPattern = function() {
return this.oOutputFormat.oFormatOptions.pattern;
};
/**
* Called by the framework when any localization setting changed
* @private
*/
Date1.prototype._handleLocalizationChange = function() {
// recreate formatters
this._createFormats();
};
/**
* Create formatters used by this type
* @private
*/
Date1.prototype._createFormats = function() {
var oSourceOptions = this.oFormatOptions.source;
this.oOutputFormat = DateFormat.getInstance(this.oFormatOptions);
if (oSourceOptions) {
if (jQuery.isEmptyObject(oSourceOptions)) {
oSourceOptions = {pattern: "yyyy-MM-dd"};
}
this.oInputFormat = DateFormat.getInstance(oSourceOptions);
}
};
return Date1;
});
| {
return this.oInputFormat;
} | conditional_block |
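The JSDoc above describes a two-stage pipeline: the optional 'source' format converts between the model string and a Date, and the primary format converts between the Date and what the user sees. A Python analogue of that round trip with datetime, purely illustrative (the real class routes both stages through sap.ui.core.format.DateFormat):

from datetime import datetime

SOURCE = '%Y-%m-%d'    # model-side pattern, like the yyyy-MM-dd default
OUTPUT = '%d %b %Y'    # display-side pattern

def format_value(model_str):
    # '2014-03-01' -> '01 Mar 2014' (under an English locale)
    return datetime.strptime(model_str, SOURCE).strftime(OUTPUT)

def parse_value(display_str):
    # '01 Mar 2014' -> '2014-03-01'
    return datetime.strptime(display_str, OUTPUT).strftime(SOURCE)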
Date.js | /*!
* ${copyright}
*/
// Provides the base implementation for all model implementations
sap.ui.define(['jquery.sap.global', 'sap/ui/core/format/DateFormat', 'sap/ui/model/SimpleType', 'sap/ui/model/FormatException', 'sap/ui/model/ParseException', 'sap/ui/model/ValidateException'],
function(jQuery, DateFormat, SimpleType, FormatException, ParseException, ValidateException) {
"use strict";
/**
* Constructor for a Date type.
*
* @class
* This class represents date simple types.
*
* @extends sap.ui.model.SimpleType
*
* @author SAP SE
* @version ${version}
*
* @public
* @param {object} [oFormatOptions] Formatting options. For a list of all available options, see {@link sap.ui.core.format.DateFormat.getDateInstance DateFormat}.
* @param {object} [oFormatOptions.source] Additional set of options used to create a second <code>DateFormat</code> object for conversions between
* string values in the data source (e.g. model) and <code>Date</code>. This second format object is used to convert from a model <code>string</code> to <code>Date</code> before
* converting the <code>Date</code> to <code>string</code> with the primary format object. Vice versa, this 'source' format is also used to format an already parsed
* external value (e.g. user input) into the string format that is expected by the data source.
* For a list of all available options, see {@link sap.ui.core.format.DateFormat.getDateInstance DateFormat}.
* In case an empty object is given, the default is the ISO date notation (yyyy-MM-dd).
* @param {object} [oConstraints] Value constraints
* @param {Date|string} [oConstraints.minimum] Smallest value allowed for this type. Values for constraints must use the same type as configured via <code>oFormatOptions.source</code>.
* @param {Date|string} [oConstraints.maximum] Largest value allowed for this type. Values for constraints must use the same type as configured via <code>oFormatOptions.source</code>.
* @alias sap.ui.model.type.Date
*/
var Date1 = SimpleType.extend("sap.ui.model.type.Date", /** @lends sap.ui.model.type.Date.prototype */ {
constructor : function () {
SimpleType.apply(this, arguments);
this.sName = "Date";
}
});
Date1.prototype.formatValue = function(oValue, sInternalType) {
var oFormat;
switch (this.getPrimitiveType(sInternalType)) {
case "string":
case "any":
if (oValue == null) {
return "";
}
if (this.oFormatOptions.source && this.oFormatOptions.source.pattern !== "timestamp" && oValue === "") {
return "";
}
oFormat = this.getModelFormat();
oValue = oFormat.parse(oValue);
return this.oOutputFormat.format(oValue);
default:
throw new FormatException("Don't know how to format Date to " + sInternalType);
}
};
Date1.prototype.parseValue = function(oValue, sInternalType) {
var oResult, oBundle;
switch (this.getPrimitiveType(sInternalType)) {
case "string":
if (oValue === "") {
return null;
}
var oResult = this.oOutputFormat.parse(oValue);
if (!oResult) {
oBundle = sap.ui.getCore().getLibraryResourceBundle();
throw new ParseException(oBundle.getText(this.sName + ".Invalid"));
}
if (this.oInputFormat) {
if (this.oFormatOptions.source.pattern == "timestamp") {
oResult = oResult.getTime();
} else {
oResult = this.oInputFormat.format(oResult);
}
}
return oResult;
default:
throw new ParseException("Don't know how to parse Date from " + sInternalType);
}
};
Date1.prototype.validateValue = function(oValue) {
if (this.oConstraints) {
var oBundle = sap.ui.getCore().getLibraryResourceBundle(),
aViolatedConstraints = [],
aMessages = [],
oInputFormat = this.oInputFormat,
that = this;
// convert date into date object to compare
if (oInputFormat && this.oFormatOptions.source.pattern != "timestamp") {
oValue = oInputFormat.parse(oValue);
}
jQuery.each(this.oConstraints, function(sName, oContent) {
if (oInputFormat) {
oContent = oInputFormat.parse(oContent);
}
switch (sName) {
case "minimum":
if (oValue < oContent) {
aViolatedConstraints.push("minimum");
aMessages.push(oBundle.getText(that.sName + ".Minimum", [oContent]));
}
break;
case "maximum":
if (oValue > oContent) {
aViolatedConstraints.push("maximum");
aMessages.push(oBundle.getText(that.sName + ".Maximum", [oContent]));
}
}
});
if (aViolatedConstraints.length > 0) {
throw new ValidateException(aMessages.join(" "), aViolatedConstraints);
}
}
};
var oTimestampInputFormat = {
format: function(oValue) {
if (oValue instanceof Date) {
return oValue.getTime();
}
return null;
},
parse: function(oValue) {
if (typeof (oValue) != "number") {
if (isNaN(oValue)) {
throw new FormatException("Cannot format date: " + oValue + " is not a valid Timestamp");
} else {
oValue = parseInt(oValue, 10);
}
}
oValue = new Date(oValue);
return oValue;
}
};
Date1.prototype.getModelFormat = function() {
if (this.oInputFormat) {
if (this.oFormatOptions.source.pattern == "timestamp") {
return oTimestampInputFormat;
} else {
return this.oInputFormat;
}
} else {
return SimpleType.prototype.getModelFormat.call(this);
}
};
Date1.prototype.setFormatOptions = function(oFormatOptions) {
this.oFormatOptions = oFormatOptions;
this._createFormats();
};
/**
* @protected
*/
Date1.prototype.getOutputPattern = function() {
return this.oOutputFormat.oFormatOptions.pattern;
};
/**
* Called by the framework when any localization setting changed
* @private
*/
Date1.prototype._handleLocalizationChange = function() {
// recreate formatters
this._createFormats();
};
/**
* Create formatters used by this type
* @private
*/
Date1.prototype._createFormats = function() {
var oSourceOptions = this.oFormatOptions.source;
this.oOutputFormat = DateFormat.getInstance(this.oFormatOptions);
if (oSourceOptions) {
if (jQuery.isEmptyObject(oSourceOptions)) {
oSourceOptions = {pattern: "yyyy-MM-dd"};
}
this.oInputFormat = DateFormat.getInstance(oSourceOptions);
}
};
return Date1;
}); | switch (sName) {
case "minimum":
if (oValue < oContent) {
aViolatedConstraints.push("minimum"); | random_line_split |
filters.js | angular.module('app')
.filter('date', function(){
'use strict';
return function(timestamp, format){
return timestamp ? moment(timestamp).format(format ? format : 'll') : '<date>';
};
})
.filter('datetime', function(){
'use strict';
return function(timestamp, format){
return timestamp ? moment(timestamp).format(format ? format : 'D MMM YYYY, HH:mm:ss') : '<datetime>';
};
})
.filter('time', function(){
'use strict';
return function(timestamp, format){
return timestamp ? moment(timestamp).format(format ? format : 'LT') : '<time>';
};
})
.filter('humanTime', function(){
'use strict';
return function(timestamp){
return timestamp ? moment(timestamp).fromNow(true) : '<humanTime>';
};
})
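// Usage sketch (illustrative; the field name msg.sentAt is assumed): in a template, piping
// msg.sentAt through humanTime renders e.g. "3 days", and through datetime a full
// "D MMM YYYY, HH:mm:ss" string; null or missing timestamps fall back to the
// '<humanTime>' / '<datetime>' placeholders.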
.filter('duration', function(){
'use strict';
return function(seconds, humanize){
if(seconds || seconds === 0){
if(humanize){
return moment.duration(seconds, 'seconds').humanize();
} else {
var prefix = -60 < seconds && seconds < 60 ? '00:' : '';
return prefix + moment.duration(seconds, 'seconds').format('hh:mm:ss');
}
} else |
};
})
.filter('mynumber', function($filter){
'use strict';
return function(number, round){
var mul = Math.pow(10, round ? round : 0);
return $filter('number')(Math.round(number*mul)/mul);
};
})
.filter('rating', function($filter){
'use strict';
return function(rating, max, withText){
var stars = rating ? new Array(Math.floor(rating)+1).join('★') : '';
var maxStars = max ? new Array(Math.floor(max)-Math.floor(rating)+1).join('☆') : '';
var text = withText ? ' ('+$filter('mynumber')(rating, 1)+' / '+$filter('mynumber')(max, 1)+')' : '';
return stars+maxStars+text;
};
});
| {
console.warn('Unable to format duration', seconds);
return '<duration>';
} | conditional_block |
filters.js | angular.module('app')
.filter('date', function(){
'use strict';
return function(timestamp, format){
return timestamp ? moment(timestamp).format(format ? format : 'll') : '<date>';
};
})
.filter('datetime', function(){
'use strict';
return function(timestamp, format){
return timestamp ? moment(timestamp).format(format ? format : 'D MMM YYYY, HH:mm:ss') : '<datetime>';
};
})
.filter('time', function(){
'use strict';
return function(timestamp, format){
return timestamp ? moment(timestamp).format(format ? format : 'LT') : '<time>';
};
})
.filter('humanTime', function(){
'use strict';
return function(timestamp){
return timestamp ? moment(timestamp).fromNow(true) : '<humanTime>';
};
})
.filter('duration', function(){ | return function(seconds, humanize){
if(seconds || seconds === 0){
if(humanize){
return moment.duration(seconds, 'seconds').humanize();
} else {
var prefix = -60 < seconds && seconds < 60 ? '00:' : '';
return prefix + moment.duration(seconds, 'seconds').format('hh:mm:ss');
}
} else {
console.warn('Unable to format duration', seconds);
return '<duration>';
}
};
})
.filter('mynumber', function($filter){
'use strict';
return function(number, round){
var mul = Math.pow(10, round ? round : 0);
return $filter('number')(Math.round(number*mul)/mul);
};
})
.filter('rating', function($filter){
'use strict';
return function(rating, max, withText){
var stars = rating ? new Array(Math.floor(rating)+1).join('★') : '';
var maxStars = max ? new Array(Math.floor(max)-Math.floor(rating)+1).join('☆') : '';
var text = withText ? ' ('+$filter('mynumber')(rating, 1)+' / '+$filter('mynumber')(max, 1)+')' : '';
return stars+maxStars+text;
};
}); | 'use strict'; | random_line_split |
IQRCode.d.ts | /// <reference path="IAdaptiveRPGroup.d.ts" />
/// <reference path="IBaseReader.d.ts" />
/**
--| ADAPTIVE RUNTIME PLATFORM |----------------------------------------------------------------------------------------
(C) Copyright 2013-2015 Carlos Lozano Diez t/a Adaptive.me <http://adaptive.me>.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 . Unless required by appli-
-cable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
Original author:
* Carlos Lozano Diez
<http://github.com/carloslozano>
<http://twitter.com/adaptivecoder>
<mailto:[email protected]>
Contributors:
* Ferran Vila Conesa
<http://github.com/fnva>
<http://twitter.com/ferran_vila>
<mailto:[email protected]>
* See source code files for contributors.
Release:
* @version v2.2.15
-------------------------------------------| aut inveniam viam aut faciam |--------------------------------------------
*/
declare module Adaptive {
/**
Interface for Managing the QR Code operations
@author Carlos Lozano Diez
@since v2.0 | /**
@class Adaptive.IQRCode
*/
interface IQRCode extends IBaseReader {
}
} | @version 1.0
*/ | random_line_split |
bootstrap-datetimepicker.ms.js | /**
* Malay translation for bootstrap-datetimepicker
* Ateman Faiz <[email protected]>
*/
;(function ($) {
$.fn.datetimepicker.dates['ms'] = {
days: ["Ahad", "Isnin", "Selasa", "Rabu", "Khamis", "Jumaat", "Sabtu", "Ahad"],
daysShort: ["Aha", "Isn", "Sel", "Rab", "Kha", "Jum", "Sab", "Aha"],
daysMin: ["Ah", "Is", "Se", "Ra", "Kh", "Ju", "Sa", "Ah"],
months: ["Januari", "Februari", "Mac", "April", "Mei", "Jun", "Julai", "Ogos", "September", "Oktober", "November", "Disember"],
monthsShort: ["Jan", "Feb", "Mar", "Apr", "Mei", "Jun", "Jul", "Ogo", "Sep", "Okt", "Nov", "Dis"],
today: "Hari Ini",
suffix: [],
meridiem: []
}; | }(jQuery)); | random_line_split |
|
upsert_resolution.rs | pub upserted: Vec<TermWithoutTempIds>,
/// Allocations that resolved due to other upserts.
pub resolved: Vec<TermWithoutTempIds>,
/// Allocations that required new entid allocations.
pub allocated: Vec<TermWithoutTempIds>,
}
impl Generation {
/// Split entities into a generation of populations that need to evolve to have their tempids
/// resolved or allocated, and a population of inert entities that do not reference tempids.
pub(crate) fn from<I>(terms: I, schema: &Schema) -> Result<(Generation, Population)> where I: IntoIterator<Item=TermWithTempIds> {
let mut generation = Generation::default();
let mut inert = vec![];
let is_unique = |a: Entid| -> Result<bool> {
let attribute: &Attribute = schema.require_attribute_for_entid(a)?;
Ok(attribute.unique == Some(attribute::Unique::Identity))
};
for term in terms.into_iter() {
match term {
Term::AddOrRetract(op, Right(e), a, Right(v)) => {
if op == OpType::Add && is_unique(a)? {
generation.upserts_ev.push(UpsertEV(e, a, v));
} else {
generation.allocations.push(Term::AddOrRetract(op, Right(e), a, Right(v)));
}
},
Term::AddOrRetract(op, Right(e), a, Left(v)) => {
if op == OpType::Add && is_unique(a)? {
generation.upserts_e.push(UpsertE(e, a, v));
} else {
generation.allocations.push(Term::AddOrRetract(op, Right(e), a, Left(v)));
}
},
Term::AddOrRetract(op, Left(e), a, Right(v)) => {
generation.allocations.push(Term::AddOrRetract(op, Left(e), a, Right(v)));
},
Term::AddOrRetract(op, Left(e), a, Left(v)) => {
inert.push(Term::AddOrRetract(op, Left(e), a, Left(v)));
},
}
}
Ok((generation, inert))
}
/// Return true if it's possible to evolve this generation further.
///
/// Note that there can be complex upserts but no simple upserts to help resolve them, and in
/// this case, we cannot evolve further.
pub(crate) fn can_evolve(&self) -> bool {
!self.upserts_e.is_empty()
}
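// Driver-loop sketch (illustrative; the store lookup that builds `temp_id_map` is assumed):
//   let (mut generation, inert) = Generation::from(terms, &schema)?;
//   while generation.can_evolve() {
//       // resolve generation.temp_id_avs() against the store into a TempIdMap
//       generation = generation.evolve_one_step(&temp_id_map);
//   }
//   generation.allocate_unresolved_upserts()?;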
/// Evolve this generation one step further by rewriting the existing :db/add entities using the
/// given temporary IDs.
///
/// TODO: Considering doing this in place; the function already consumes `self`.
pub(crate) fn evolve_one_step(self, temp_id_map: &TempIdMap) -> Generation {
let mut next = Generation::default();
// We'll iterate our own allocations to resolve more things, but terms that have already
// resolved stay resolved.
next.resolved = self.resolved;
for UpsertE(t, a, v) in self.upserts_e {
match temp_id_map.get(&*t) {
Some(&n) => next.upserted.push(Term::AddOrRetract(OpType::Add, n, a, v)),
None => next.allocations.push(Term::AddOrRetract(OpType::Add, Right(t), a, Left(v))),
}
}
for UpsertEV(t1, a, t2) in self.upserts_ev {
match (temp_id_map.get(&*t1), temp_id_map.get(&*t2)) {
(Some(_), Some(&n2)) => {
// Even though we can resolve entirely, it's possible that the remaining upsert
// could conflict. Moving straight to resolved doesn't give us a chance to
// search the store for the conflict.
next.upserts_e.push(UpsertE(t1, a, TypedValue::Ref(n2.0)))
},
(None, Some(&n2)) => next.upserts_e.push(UpsertE(t1, a, TypedValue::Ref(n2.0))),
(Some(&n1), None) => next.allocations.push(Term::AddOrRetract(OpType::Add, Left(n1), a, Right(t2))),
(None, None) => next.upserts_ev.push(UpsertEV(t1, a, t2))
}
}
// There's no particular need to separate resolved from allocations right here and right
// now, although it is convenient.
for term in self.allocations {
// TODO: find an expression that destructures less? I still expect this to be efficient
// but it's a little verbose.
match term {
Term::AddOrRetract(op, Right(t1), a, Right(t2)) => {
match (temp_id_map.get(&*t1), temp_id_map.get(&*t2)) {
(Some(&n1), Some(&n2)) => next.resolved.push(Term::AddOrRetract(op, n1, a, TypedValue::Ref(n2.0))),
(None, Some(&n2)) => next.allocations.push(Term::AddOrRetract(op, Right(t1), a, Left(TypedValue::Ref(n2.0)))),
(Some(&n1), None) => next.allocations.push(Term::AddOrRetract(op, Left(n1), a, Right(t2))),
(None, None) => next.allocations.push(Term::AddOrRetract(op, Right(t1), a, Right(t2))),
}
},
Term::AddOrRetract(op, Right(t), a, Left(v)) => {
match temp_id_map.get(&*t) {
Some(&n) => next.resolved.push(Term::AddOrRetract(op, n, a, v)),
None => next.allocations.push(Term::AddOrRetract(op, Right(t), a, Left(v))),
}
},
Term::AddOrRetract(op, Left(e), a, Right(t)) => {
match temp_id_map.get(&*t) {
Some(&n) => next.resolved.push(Term::AddOrRetract(op, e, a, TypedValue::Ref(n.0))),
None => next.allocations.push(Term::AddOrRetract(op, Left(e), a, Right(t))),
}
},
Term::AddOrRetract(_, Left(_), _, Left(_)) => unreachable!(),
}
}
next
}
// Collect id->[a v] pairs that might upsert at this evolutionary step.
pub(crate) fn temp_id_avs<'a>(&'a self) -> Vec<(TempIdHandle, AVPair)> {
let mut temp_id_avs: Vec<(TempIdHandle, AVPair)> = vec![];
// TODO: map/collect.
for &UpsertE(ref t, ref a, ref v) in &self.upserts_e {
// TODO: figure out how to make this less expensive, i.e., don't require
// clone() of an arbitrary value.
temp_id_avs.push((t.clone(), (*a, v.clone())));
}
temp_id_avs
}
/// Evolve potential upserts that haven't resolved into allocations.
pub(crate) fn allocate_unresolved_upserts(&mut self) -> Result<()> {
let mut upserts_ev = vec![];
::std::mem::swap(&mut self.upserts_ev, &mut upserts_ev);
self.allocations.extend(upserts_ev.into_iter().map(|UpsertEV(t1, a, t2)| Term::AddOrRetract(OpType::Add, Right(t1), a, Right(t2))));
Ok(())
}
/// After evolution is complete, yield the set of tempids that require entid allocation.
///
/// Some of the tempids may be identified, so we also provide a map from tempid to a dense set
/// of contiguous integer labels.
pub(crate) fn temp_ids_in_allocations(&self, schema: &Schema) -> Result<BTreeMap<TempIdHandle, usize>> {
assert!(self.upserts_e.is_empty(), "All upserts should have been upserted, resolved, or moved to the allocated population!");
assert!(self.upserts_ev.is_empty(), "All upserts should have been upserted, resolved, or moved to the allocated population!");
let mut temp_ids: BTreeSet<TempIdHandle> = BTreeSet::default();
let mut tempid_avs: BTreeMap<(Entid, TypedValueOr<TempIdHandle>), Vec<TempIdHandle>> = BTreeMap::default();
for term in self.allocations.iter() {
match term {
&Term::AddOrRetract(OpType::Add, Right(ref t1), a, Right(ref t2)) => {
temp_ids.insert(t1.clone());
temp_ids.insert(t2.clone());
let attribute: &Attribute = schema.require_attribute_for_entid(a)?;
if attribute.unique == Some(attribute::Unique::Identity) | {
tempid_avs.entry((a, Right(t2.clone()))).or_insert(vec![]).push(t1.clone());
} | conditional_block |
|
upsert_resolution.rs | :db.unique/identity if it has failed to upsert.
/// - [:db/add TEMPID b v]. b may be :db.unique/identity if it has failed to upsert.
/// - [:db/add e b OTHERID].
allocations: Vec<TermWithTempIds>,
/// Entities that upserted and no longer reference tempids. These assertions are guaranteed to
/// be in the store.
upserted: Vec<TermWithoutTempIds>,
/// Entities that resolved due to other upserts and no longer reference tempids. These
/// assertions may or may not be in the store.
resolved: Vec<TermWithoutTempIds>,
}
#[derive(Clone,Debug,Default,Eq,Hash,Ord,PartialOrd,PartialEq)]
pub(crate) struct FinalPopulations {
/// Upserts that upserted.
pub upserted: Vec<TermWithoutTempIds>,
/// Allocations that resolved due to other upserts.
pub resolved: Vec<TermWithoutTempIds>,
/// Allocations that required new entid allocations.
pub allocated: Vec<TermWithoutTempIds>,
}
impl Generation {
/// Split entities into a generation of populations that need to evolve to have their tempids
/// resolved or allocated, and a population of inert entities that do not reference tempids.
pub(crate) fn from<I>(terms: I, schema: &Schema) -> Result<(Generation, Population)> where I: IntoIterator<Item=TermWithTempIds> {
let mut generation = Generation::default();
let mut inert = vec![];
let is_unique = |a: Entid| -> Result<bool> {
let attribute: &Attribute = schema.require_attribute_for_entid(a)?;
Ok(attribute.unique == Some(attribute::Unique::Identity))
};
for term in terms.into_iter() {
match term {
Term::AddOrRetract(op, Right(e), a, Right(v)) => {
if op == OpType::Add && is_unique(a)? {
generation.upserts_ev.push(UpsertEV(e, a, v));
} else {
generation.allocations.push(Term::AddOrRetract(op, Right(e), a, Right(v)));
}
},
Term::AddOrRetract(op, Right(e), a, Left(v)) => {
if op == OpType::Add && is_unique(a)? {
generation.upserts_e.push(UpsertE(e, a, v));
} else {
generation.allocations.push(Term::AddOrRetract(op, Right(e), a, Left(v)));
}
},
Term::AddOrRetract(op, Left(e), a, Right(v)) => {
generation.allocations.push(Term::AddOrRetract(op, Left(e), a, Right(v)));
},
Term::AddOrRetract(op, Left(e), a, Left(v)) => {
inert.push(Term::AddOrRetract(op, Left(e), a, Left(v)));
},
}
}
Ok((generation, inert))
}
/// Return true if it's possible to evolve this generation further.
///
/// Note that there can be complex upserts but no simple upserts to help resolve them, and in
/// this case, we cannot evolve further.
pub(crate) fn can_evolve(&self) -> bool {
!self.upserts_e.is_empty()
}
/// Evolve this generation one step further by rewriting the existing :db/add entities using the
/// given temporary IDs.
///
/// TODO: Considering doing this in place; the function already consumes `self`.
pub(crate) fn evolve_one_step(self, temp_id_map: &TempIdMap) -> Generation {
let mut next = Generation::default();
// We'll iterate our own allocations to resolve more things, but terms that have already
// resolved stay resolved.
next.resolved = self.resolved;
for UpsertE(t, a, v) in self.upserts_e {
match temp_id_map.get(&*t) {
Some(&n) => next.upserted.push(Term::AddOrRetract(OpType::Add, n, a, v)),
None => next.allocations.push(Term::AddOrRetract(OpType::Add, Right(t), a, Left(v))),
}
}
for UpsertEV(t1, a, t2) in self.upserts_ev {
match (temp_id_map.get(&*t1), temp_id_map.get(&*t2)) {
(Some(_), Some(&n2)) => {
// Even though we can resolve entirely, it's possible that the remaining upsert
// could conflict. Moving straight to resolved doesn't give us a chance to
// search the store for the conflict.
next.upserts_e.push(UpsertE(t1, a, TypedValue::Ref(n2.0)))
},
(None, Some(&n2)) => next.upserts_e.push(UpsertE(t1, a, TypedValue::Ref(n2.0))),
(Some(&n1), None) => next.allocations.push(Term::AddOrRetract(OpType::Add, Left(n1), a, Right(t2))),
(None, None) => next.upserts_ev.push(UpsertEV(t1, a, t2))
}
}
// There's no particular need to separate resolved from allocations right here and right
// now, although it is convenient.
for term in self.allocations {
// TODO: find an expression that destructures less? I still expect this to be efficient
// but it's a little verbose.
match term {
Term::AddOrRetract(op, Right(t1), a, Right(t2)) => {
match (temp_id_map.get(&*t1), temp_id_map.get(&*t2)) {
(Some(&n1), Some(&n2)) => next.resolved.push(Term::AddOrRetract(op, n1, a, TypedValue::Ref(n2.0))),
(None, Some(&n2)) => next.allocations.push(Term::AddOrRetract(op, Right(t1), a, Left(TypedValue::Ref(n2.0)))),
(Some(&n1), None) => next.allocations.push(Term::AddOrRetract(op, Left(n1), a, Right(t2))),
(None, None) => next.allocations.push(Term::AddOrRetract(op, Right(t1), a, Right(t2))),
}
},
Term::AddOrRetract(op, Right(t), a, Left(v)) => {
match temp_id_map.get(&*t) {
Some(&n) => next.resolved.push(Term::AddOrRetract(op, n, a, v)),
None => next.allocations.push(Term::AddOrRetract(op, Right(t), a, Left(v))),
}
},
Term::AddOrRetract(op, Left(e), a, Right(t)) => {
match temp_id_map.get(&*t) {
Some(&n) => next.resolved.push(Term::AddOrRetract(op, e, a, TypedValue::Ref(n.0))),
None => next.allocations.push(Term::AddOrRetract(op, Left(e), a, Right(t))),
}
},
Term::AddOrRetract(_, Left(_), _, Left(_)) => unreachable!(),
}
}
next
}
// Collect id->[a v] pairs that might upsert at this evolutionary step.
pub(crate) fn temp_id_avs<'a>(&'a self) -> Vec<(TempIdHandle, AVPair)> |
/// Evolve potential upserts that haven't resolved into allocations.
pub(crate) fn allocate_unresolved_upserts(&mut self) -> Result<()> {
let mut upserts_ev = vec![];
::std::mem::swap(&mut self.upserts_ev, &mut upserts_ev);
self.allocations.extend(upserts_ev.into_iter().map(|UpsertEV(t1, a, t2)| Term::AddOrRetract(OpType::Add, Right(t1), a, Right(t2))));
Ok(())
}
/// After evolution is complete, yield the set of tempids that require entid allocation.
///
/// Some of the tempids may be identified, so we also provide a map from tempid to a dense set
/// of contiguous integer labels.
pub(crate) fn temp_ids_in_allocations(&self, schema: &Schema) -> Result<BTreeMap<TempIdHandle, usize>> {
assert!(self.upserts_e.is_empty(), "All upserts should have been upserted, resolved, or moved to the allocated population!");
assert!(self.upserts_ev.is_empty(), "All upserts should have | {
let mut temp_id_avs: Vec<(TempIdHandle, AVPair)> = vec![];
// TODO: map/collect.
for &UpsertE(ref t, ref a, ref v) in &self.upserts_e {
// TODO: figure out how to make this less expensive, i.e., don't require
// clone() of an arbitrary value.
temp_id_avs.push((t.clone(), (*a, v.clone())));
}
temp_id_avs
} | identifier_body |
upsert_resolution.rs | /// be in the store.
upserted: Vec<TermWithoutTempIds>,
/// Entities that resolved due to other upserts and no longer reference tempids. These
/// assertions may or may not be in the store.
resolved: Vec<TermWithoutTempIds>,
}
#[derive(Clone,Debug,Default,Eq,Hash,Ord,PartialOrd,PartialEq)]
pub(crate) struct FinalPopulations {
/// Upserts that upserted.
pub upserted: Vec<TermWithoutTempIds>,
/// Allocations that resolved due to other upserts.
pub resolved: Vec<TermWithoutTempIds>,
/// Allocations that required new entid allocations.
pub allocated: Vec<TermWithoutTempIds>,
}
impl Generation {
/// Split entities into a generation of populations that need to evolve to have their tempids
/// resolved or allocated, and a population of inert entities that do not reference tempids.
pub(crate) fn from<I>(terms: I, schema: &Schema) -> Result<(Generation, Population)> where I: IntoIterator<Item=TermWithTempIds> {
let mut generation = Generation::default();
let mut inert = vec![];
let is_unique = |a: Entid| -> Result<bool> {
let attribute: &Attribute = schema.require_attribute_for_entid(a)?;
Ok(attribute.unique == Some(attribute::Unique::Identity))
};
for term in terms.into_iter() {
match term {
Term::AddOrRetract(op, Right(e), a, Right(v)) => {
if op == OpType::Add && is_unique(a)? {
generation.upserts_ev.push(UpsertEV(e, a, v));
} else {
generation.allocations.push(Term::AddOrRetract(op, Right(e), a, Right(v)));
}
},
Term::AddOrRetract(op, Right(e), a, Left(v)) => {
if op == OpType::Add && is_unique(a)? {
generation.upserts_e.push(UpsertE(e, a, v));
} else {
generation.allocations.push(Term::AddOrRetract(op, Right(e), a, Left(v)));
}
},
Term::AddOrRetract(op, Left(e), a, Right(v)) => {
generation.allocations.push(Term::AddOrRetract(op, Left(e), a, Right(v)));
},
Term::AddOrRetract(op, Left(e), a, Left(v)) => {
inert.push(Term::AddOrRetract(op, Left(e), a, Left(v)));
},
}
}
Ok((generation, inert))
}
/// Return true if it's possible to evolve this generation further.
///
/// Note that there can be complex upserts but no simple upserts to help resolve them, and in
/// this case, we cannot evolve further.
pub(crate) fn can_evolve(&self) -> bool {
!self.upserts_e.is_empty()
}
/// Evolve this generation one step further by rewriting the existing :db/add entities using the
/// given temporary IDs.
///
/// TODO: Considering doing this in place; the function already consumes `self`.
pub(crate) fn evolve_one_step(self, temp_id_map: &TempIdMap) -> Generation {
let mut next = Generation::default();
// We'll iterate our own allocations to resolve more things, but terms that have already
// resolved stay resolved.
next.resolved = self.resolved;
for UpsertE(t, a, v) in self.upserts_e {
match temp_id_map.get(&*t) {
Some(&n) => next.upserted.push(Term::AddOrRetract(OpType::Add, n, a, v)),
None => next.allocations.push(Term::AddOrRetract(OpType::Add, Right(t), a, Left(v))),
}
}
for UpsertEV(t1, a, t2) in self.upserts_ev {
match (temp_id_map.get(&*t1), temp_id_map.get(&*t2)) {
(Some(_), Some(&n2)) => {
// Even though we can resolve entirely, it's possible that the remaining upsert
// could conflict. Moving straight to resolved doesn't give us a chance to
// search the store for the conflict.
next.upserts_e.push(UpsertE(t1, a, TypedValue::Ref(n2.0)))
},
(None, Some(&n2)) => next.upserts_e.push(UpsertE(t1, a, TypedValue::Ref(n2.0))),
(Some(&n1), None) => next.allocations.push(Term::AddOrRetract(OpType::Add, Left(n1), a, Right(t2))),
(None, None) => next.upserts_ev.push(UpsertEV(t1, a, t2))
}
}
// There's no particular need to separate resolved from allocations right here and right
// now, although it is convenient.
for term in self.allocations {
// TODO: find an expression that destructures less? I still expect this to be efficient
// but it's a little verbose.
match term {
Term::AddOrRetract(op, Right(t1), a, Right(t2)) => {
match (temp_id_map.get(&*t1), temp_id_map.get(&*t2)) {
(Some(&n1), Some(&n2)) => next.resolved.push(Term::AddOrRetract(op, n1, a, TypedValue::Ref(n2.0))),
(None, Some(&n2)) => next.allocations.push(Term::AddOrRetract(op, Right(t1), a, Left(TypedValue::Ref(n2.0)))),
(Some(&n1), None) => next.allocations.push(Term::AddOrRetract(op, Left(n1), a, Right(t2))),
(None, None) => next.allocations.push(Term::AddOrRetract(op, Right(t1), a, Right(t2))),
}
},
Term::AddOrRetract(op, Right(t), a, Left(v)) => {
match temp_id_map.get(&*t) {
Some(&n) => next.resolved.push(Term::AddOrRetract(op, n, a, v)),
None => next.allocations.push(Term::AddOrRetract(op, Right(t), a, Left(v))),
}
},
Term::AddOrRetract(op, Left(e), a, Right(t)) => {
match temp_id_map.get(&*t) {
Some(&n) => next.resolved.push(Term::AddOrRetract(op, e, a, TypedValue::Ref(n.0))),
None => next.allocations.push(Term::AddOrRetract(op, Left(e), a, Right(t))),
}
},
Term::AddOrRetract(_, Left(_), _, Left(_)) => unreachable!(),
}
}
next
}
// Collect id->[a v] pairs that might upsert at this evolutionary step.
pub(crate) fn temp_id_avs<'a>(&'a self) -> Vec<(TempIdHandle, AVPair)> {
let mut temp_id_avs: Vec<(TempIdHandle, AVPair)> = vec![];
// TODO: map/collect.
for &UpsertE(ref t, ref a, ref v) in &self.upserts_e {
// TODO: figure out how to make this less expensive, i.e., don't require
// clone() of an arbitrary value.
temp_id_avs.push((t.clone(), (*a, v.clone())));
}
temp_id_avs
}
/// Evolve potential upserts that haven't resolved into allocations.
pub(crate) fn allocate_unresolved_upserts(&mut self) -> Result<()> {
let mut upserts_ev = vec![];
::std::mem::swap(&mut self.upserts_ev, &mut upserts_ev);
self.allocations.extend(upserts_ev.into_iter().map(|UpsertEV(t1, a, t2)| Term::AddOrRetract(OpType::Add, Right(t1), a, Right(t2))));
Ok(())
}
/// After evolution is complete, yield the set of tempids that require entid allocation.
///
/// Some of the tempids may be identified, so we also provide a map from tempid to a dense set
/// of contiguous integer labels.
pub(crate) fn temp_ids_in_allocations(&self, schema: &Schema) -> Result<BTreeMap<TempIdHandle, usize>> {
assert!(self.upserts_e.is_empty(), "All upserts should have been upserted, resolved, or moved to the allocated population!");
assert!(self.upserts_ev.is_empty(), "All upserts should have been upserted, resolved, or moved to the allocated population!");
let mut temp_ids: BTreeSet<TempIdHandle> = BTreeSet::default();
let mut tempid_avs: BTreeMap<(Entid, TypedValueOr<TempIdHandle>), Vec<TempIdHandle>> = BTreeMap::default();
for term in self.allocations.iter() {
match term { | random_line_split |
||
upsert_resolution.rs | :db.unique/identity if it has failed to upsert.
/// - [:db/add TEMPID b v]. b may be :db.unique/identity if it has failed to upsert.
/// - [:db/add e b OTHERID].
allocations: Vec<TermWithTempIds>,
/// Entities that upserted and no longer reference tempids. These assertions are guaranteed to
/// be in the store.
upserted: Vec<TermWithoutTempIds>,
/// Entities that resolved due to other upserts and no longer reference tempids. These
/// assertions may or may not be in the store.
resolved: Vec<TermWithoutTempIds>,
}
#[derive(Clone,Debug,Default,Eq,Hash,Ord,PartialOrd,PartialEq)]
pub(crate) struct FinalPopulations {
/// Upserts that upserted.
pub upserted: Vec<TermWithoutTempIds>,
/// Allocations that resolved due to other upserts.
pub resolved: Vec<TermWithoutTempIds>,
/// Allocations that required new entid allocations.
pub allocated: Vec<TermWithoutTempIds>,
}
impl Generation {
/// Split entities into a generation of populations that need to evolve to have their tempids
/// resolved or allocated, and a population of inert entities that do not reference tempids.
pub(crate) fn from<I>(terms: I, schema: &Schema) -> Result<(Generation, Population)> where I: IntoIterator<Item=TermWithTempIds> {
let mut generation = Generation::default();
let mut inert = vec![];
let is_unique = |a: Entid| -> Result<bool> {
let attribute: &Attribute = schema.require_attribute_for_entid(a)?;
Ok(attribute.unique == Some(attribute::Unique::Identity))
};
for term in terms.into_iter() {
match term {
Term::AddOrRetract(op, Right(e), a, Right(v)) => {
if op == OpType::Add && is_unique(a)? {
generation.upserts_ev.push(UpsertEV(e, a, v));
} else {
generation.allocations.push(Term::AddOrRetract(op, Right(e), a, Right(v)));
}
},
Term::AddOrRetract(op, Right(e), a, Left(v)) => {
if op == OpType::Add && is_unique(a)? {
generation.upserts_e.push(UpsertE(e, a, v));
} else {
generation.allocations.push(Term::AddOrRetract(op, Right(e), a, Left(v)));
}
},
Term::AddOrRetract(op, Left(e), a, Right(v)) => {
generation.allocations.push(Term::AddOrRetract(op, Left(e), a, Right(v)));
},
Term::AddOrRetract(op, Left(e), a, Left(v)) => {
inert.push(Term::AddOrRetract(op, Left(e), a, Left(v)));
},
}
}
Ok((generation, inert))
}
/// Return true if it's possible to evolve this generation further.
///
/// Note that there can be complex upserts but no simple upserts to help resolve them, and in
/// this case, we cannot evolve further.
pub(crate) fn | (&self) -> bool {
!self.upserts_e.is_empty()
}
/// Evolve this generation one step further by rewriting the existing :db/add entities using the
/// given temporary IDs.
///
/// TODO: Considering doing this in place; the function already consumes `self`.
pub(crate) fn evolve_one_step(self, temp_id_map: &TempIdMap) -> Generation {
let mut next = Generation::default();
// We'll iterate our own allocations to resolve more things, but terms that have already
// resolved stay resolved.
next.resolved = self.resolved;
for UpsertE(t, a, v) in self.upserts_e {
match temp_id_map.get(&*t) {
Some(&n) => next.upserted.push(Term::AddOrRetract(OpType::Add, n, a, v)),
None => next.allocations.push(Term::AddOrRetract(OpType::Add, Right(t), a, Left(v))),
}
}
for UpsertEV(t1, a, t2) in self.upserts_ev {
match (temp_id_map.get(&*t1), temp_id_map.get(&*t2)) {
(Some(_), Some(&n2)) => {
// Even though we can resolve entirely, it's possible that the remaining upsert
// could conflict. Moving straight to resolved doesn't give us a chance to
// search the store for the conflict.
next.upserts_e.push(UpsertE(t1, a, TypedValue::Ref(n2.0)))
},
(None, Some(&n2)) => next.upserts_e.push(UpsertE(t1, a, TypedValue::Ref(n2.0))),
(Some(&n1), None) => next.allocations.push(Term::AddOrRetract(OpType::Add, Left(n1), a, Right(t2))),
(None, None) => next.upserts_ev.push(UpsertEV(t1, a, t2))
}
}
// There's no particular need to separate resolved from allocations right here and right
// now, although it is convenient.
for term in self.allocations {
// TODO: find an expression that destructures less? I still expect this to be efficient
// but it's a little verbose.
match term {
Term::AddOrRetract(op, Right(t1), a, Right(t2)) => {
match (temp_id_map.get(&*t1), temp_id_map.get(&*t2)) {
(Some(&n1), Some(&n2)) => next.resolved.push(Term::AddOrRetract(op, n1, a, TypedValue::Ref(n2.0))),
(None, Some(&n2)) => next.allocations.push(Term::AddOrRetract(op, Right(t1), a, Left(TypedValue::Ref(n2.0)))),
(Some(&n1), None) => next.allocations.push(Term::AddOrRetract(op, Left(n1), a, Right(t2))),
(None, None) => next.allocations.push(Term::AddOrRetract(op, Right(t1), a, Right(t2))),
}
},
Term::AddOrRetract(op, Right(t), a, Left(v)) => {
match temp_id_map.get(&*t) {
Some(&n) => next.resolved.push(Term::AddOrRetract(op, n, a, v)),
None => next.allocations.push(Term::AddOrRetract(op, Right(t), a, Left(v))),
}
},
Term::AddOrRetract(op, Left(e), a, Right(t)) => {
match temp_id_map.get(&*t) {
Some(&n) => next.resolved.push(Term::AddOrRetract(op, e, a, TypedValue::Ref(n.0))),
None => next.allocations.push(Term::AddOrRetract(op, Left(e), a, Right(t))),
}
},
Term::AddOrRetract(_, Left(_), _, Left(_)) => unreachable!(),
}
}
next
}
// Collect id->[a v] pairs that might upsert at this evolutionary step.
pub(crate) fn temp_id_avs<'a>(&'a self) -> Vec<(TempIdHandle, AVPair)> {
let mut temp_id_avs: Vec<(TempIdHandle, AVPair)> = vec![];
// TODO: map/collect.
for &UpsertE(ref t, ref a, ref v) in &self.upserts_e {
// TODO: figure out how to make this less expensive, i.e., don't require
// clone() of an arbitrary value.
temp_id_avs.push((t.clone(), (*a, v.clone())));
}
temp_id_avs
}
/// Evolve potential upserts that haven't resolved into allocations.
pub(crate) fn allocate_unresolved_upserts(&mut self) -> Result<()> {
let mut upserts_ev = vec![];
::std::mem::swap(&mut self.upserts_ev, &mut upserts_ev);
self.allocations.extend(upserts_ev.into_iter().map(|UpsertEV(t1, a, t2)| Term::AddOrRetract(OpType::Add, Right(t1), a, Right(t2))));
Ok(())
}
/// After evolution is complete, yield the set of tempids that require entid allocation.
///
/// Some of the tempids may be identified, so we also provide a map from tempid to a dense set
/// of contiguous integer labels.
pub(crate) fn temp_ids_in_allocations(&self, schema: &Schema) -> Result<BTreeMap<TempIdHandle, usize>> {
assert!(self.upserts_e.is_empty(), "All upserts should have been upserted, resolved, or moved to the allocated population!");
assert!(self.upserts_ev.is_empty(), "All upserts should have been | can_evolve | identifier_name |
test_main.py | #!/usr/bin/env python3
import glob
import os
import re
import subprocess
import pytest
@pytest.fixture(autouse=True, scope="module")
def | (request):
with pytest.raises(Exception):
subprocess.check_call(['certbot', '--version'])
try:
snap_folder = request.config.getoption("snap_folder")
snap_arch = request.config.getoption("snap_arch")
snap_path = glob.glob(os.path.join(snap_folder, 'certbot_*_{0}.snap'.format(snap_arch)))[0]
subprocess.check_call(['snap', 'install', '--classic', '--dangerous', snap_path])
subprocess.check_call(['certbot', '--version'])
yield
finally:
subprocess.call(['snap', 'remove', 'certbot'])
def test_dns_plugin_install(dns_snap_path):
"""
Test that each DNS plugin Certbot snap can be installed
and is usable with the Certbot snap.
"""
plugin_name = re.match(r'^certbot-(dns-\w+)_.*\.snap$',
os.path.basename(dns_snap_path)).group(1)
snap_name = 'certbot-{0}'.format(plugin_name)
assert plugin_name not in subprocess.check_output(['certbot', 'plugins', '--prepare'],
universal_newlines=True)
try:
subprocess.check_call(['snap', 'install', '--dangerous', dns_snap_path])
subprocess.check_call(['snap', 'set', 'certbot', 'trust-plugin-with-root=ok'])
subprocess.check_call(['snap', 'connect', 'certbot:plugin', snap_name])
assert plugin_name in subprocess.check_output(['certbot', 'plugins', '--prepare'],
universal_newlines=True)
subprocess.check_call(['snap', 'connect', snap_name + ':certbot-metadata',
'certbot:certbot-metadata'])
subprocess.check_call(['snap', 'install', '--dangerous', dns_snap_path])
finally:
subprocess.call(['snap', 'remove', plugin_name])
| install_certbot_snap | identifier_name |
test_main.py | #!/usr/bin/env python3
import glob
import os
import re
import subprocess
import pytest
@pytest.fixture(autouse=True, scope="module")
def install_certbot_snap(request):
with pytest.raises(Exception):
subprocess.check_call(['certbot', '--version'])
try:
snap_folder = request.config.getoption("snap_folder")
snap_arch = request.config.getoption("snap_arch")
snap_path = glob.glob(os.path.join(snap_folder, 'certbot_*_{0}.snap'.format(snap_arch)))[0]
subprocess.check_call(['snap', 'install', '--classic', '--dangerous', snap_path])
subprocess.check_call(['certbot', '--version'])
yield
finally:
subprocess.call(['snap', 'remove', 'certbot'])
def test_dns_plugin_install(dns_snap_path):
"""
Test that each DNS plugin Certbot snap can be installed
and is usable with the Certbot snap.
"""
plugin_name = re.match(r'^certbot-(dns-\w+)_.*\.snap$',
os.path.basename(dns_snap_path)).group(1)
snap_name = 'certbot-{0}'.format(plugin_name)
assert plugin_name not in subprocess.check_output(['certbot', 'plugins', '--prepare'],
universal_newlines=True)
try:
subprocess.check_call(['snap', 'install', '--dangerous', dns_snap_path])
subprocess.check_call(['snap', 'set', 'certbot', 'trust-plugin-with-root=ok'])
subprocess.check_call(['snap', 'connect', 'certbot:plugin', snap_name])
assert plugin_name in subprocess.check_output(['certbot', 'plugins', '--prepare'], | universal_newlines=True)
subprocess.check_call(['snap', 'connect', snap_name + ':certbot-metadata',
'certbot:certbot-metadata'])
subprocess.check_call(['snap', 'install', '--dangerous', dns_snap_path])
finally:
subprocess.call(['snap', 'remove', plugin_name]) | random_line_split |
|
test_main.py | #!/usr/bin/env python3
import glob
import os
import re
import subprocess
import pytest
@pytest.fixture(autouse=True, scope="module")
def install_certbot_snap(request):
|
def test_dns_plugin_install(dns_snap_path):
"""
Test that each DNS plugin Certbot snap can be installed
and is usable with the Certbot snap.
"""
plugin_name = re.match(r'^certbot-(dns-\w+)_.*\.snap$',
os.path.basename(dns_snap_path)).group(1)
snap_name = 'certbot-{0}'.format(plugin_name)
assert plugin_name not in subprocess.check_output(['certbot', 'plugins', '--prepare'],
universal_newlines=True)
try:
subprocess.check_call(['snap', 'install', '--dangerous', dns_snap_path])
subprocess.check_call(['snap', 'set', 'certbot', 'trust-plugin-with-root=ok'])
subprocess.check_call(['snap', 'connect', 'certbot:plugin', snap_name])
assert plugin_name in subprocess.check_output(['certbot', 'plugins', '--prepare'],
universal_newlines=True)
subprocess.check_call(['snap', 'connect', snap_name + ':certbot-metadata',
'certbot:certbot-metadata'])
subprocess.check_call(['snap', 'install', '--dangerous', dns_snap_path])
finally:
subprocess.call(['snap', 'remove', plugin_name])
| with pytest.raises(Exception):
subprocess.check_call(['certbot', '--version'])
try:
snap_folder = request.config.getoption("snap_folder")
snap_arch = request.config.getoption("snap_arch")
snap_path = glob.glob(os.path.join(snap_folder, 'certbot_*_{0}.snap'.format(snap_arch)))[0]
subprocess.check_call(['snap', 'install', '--classic', '--dangerous', snap_path])
subprocess.check_call(['certbot', '--version'])
yield
finally:
subprocess.call(['snap', 'remove', 'certbot']) | identifier_body |
early-vtbl-resolution.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait thing<A> {
fn foo(&self) -> Option<A>;
}
impl<A> thing<A> for int {
fn foo(&self) -> Option<A> { None }
}
fn foo_func<A, B: thing<A>>(x: B) -> Option<A> { x.foo() }
struct A { a: int }
pub fn main() {
for old_iter::eachi(&(Some(A {a: 0}))) |i, a| {
debug!("%u %d", i, a.a);
}
let _x: Option<float> = foo_func(0);
} | // file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
// | random_line_split |
early-vtbl-resolution.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait thing<A> {
fn foo(&self) -> Option<A>;
}
impl<A> thing<A> for int {
fn foo(&self) -> Option<A> { None }
}
fn | <A, B: thing<A>>(x: B) -> Option<A> { x.foo() }
struct A { a: int }
pub fn main() {
for old_iter::eachi(&(Some(A {a: 0}))) |i, a| {
debug!("%u %d", i, a.a);
}
let _x: Option<float> = foo_func(0);
}
| foo_func | identifier_name |
early-vtbl-resolution.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait thing<A> {
fn foo(&self) -> Option<A>;
}
impl<A> thing<A> for int {
fn foo(&self) -> Option<A> { None }
}
fn foo_func<A, B: thing<A>>(x: B) -> Option<A> { x.foo() }
struct A { a: int }
pub fn main() |
let _x: Option<float> = foo_func(0);
}
| {
for old_iter::eachi(&(Some(A {a: 0}))) |i, a| {
debug!("%u %d", i, a.a);
} | identifier_body |
properties.ts | import {defineMIME, defineMode} from '../index';
defineMode('properties', () => ({
token: (stream, state) => {
const sol = stream.sol() || state.afterSection;
const eol = stream.eol();
state.afterSection = false;
if (sol) {
if (state.nextMultiline) {
state.inMultiline = true;
state.nextMultiline = false;
} else {
state.position = 'def';
}
}
if (eol && !state.nextMultiline) {
state.inMultiline = false;
state.position = 'def';
}
if (sol) {
while (stream.eatSpace()) {
}
}
const ch = stream.next();
if (sol && (ch === '#' || ch === '!' || ch === ';')) {
state.position = 'comment';
stream.skipToEnd();
return 'comment';
} else if (sol && ch === '[') | else if (ch === '=' || ch === ':') {
state.position = 'quote';
return null;
} else if (ch === '\\' && state.position === 'quote') {
if (stream.eol()) { // end of line?
// Multiline value
state.nextMultiline = true;
}
}
return state.position;
},
startState: () => {
return {
position: 'def', // Current position, "def", "quote" or "comment"
nextMultiline: false, // Is the next line multiline value
inMultiline: false, // Is the current line a multiline value
afterSection: false // Did we just open a section
};
}
}));
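// Sample input this mode tokenizes (illustrative): '#', '!' and ';' start comments, '[...]' opens
// a section header, '=' or ':' separates key and value, and a trailing '\' continues the value:
//   # settings
//   [paths]
//   home = /usr/local \
//          /opt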
defineMIME('text/x-properties', 'properties');
defineMIME('text/x-ini', 'properties');
| {
state.afterSection = true;
stream.skipTo(']');
stream.eat(']');
return 'header';
} | conditional_block |
properties.ts | import {defineMIME, defineMode} from '../index';
defineMode('properties', () => ({
token: (stream, state) => {
const sol = stream.sol() || state.afterSection;
const eol = stream.eol();
state.afterSection = false;
if (sol) {
if (state.nextMultiline) {
state.inMultiline = true;
state.nextMultiline = false;
} else {
state.position = 'def';
}
}
if (eol && !state.nextMultiline) {
state.inMultiline = false;
state.position = 'def';
}
if (sol) {
while (stream.eatSpace()) {
}
}
const ch = stream.next();
if (sol && (ch === '#' || ch === '!' || ch === ';')) {
state.position = 'comment';
stream.skipToEnd();
return 'comment';
} else if (sol && ch === '[') { | state.afterSection = true;
stream.skipTo(']');
stream.eat(']');
return 'header';
} else if (ch === '=' || ch === ':') {
state.position = 'quote';
return null;
} else if (ch === '\\' && state.position === 'quote') {
if (stream.eol()) { // end of line?
// Multiline value
state.nextMultiline = true;
}
}
return state.position;
},
startState: () => {
return {
position: 'def', // Current position, "def", "quote" or "comment"
nextMultiline: false, // Is the next line multiline value
inMultiline: false, // Is the current line a multiline value
afterSection: false // Did we just open a section
};
}
}));
defineMIME('text/x-properties', 'properties');
defineMIME('text/x-ini', 'properties'); | random_line_split |
|
download.rs | use std::fmt::{self, Display, Formatter};
use uuid::Uuid;
/// Details of a package for downloading.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct Package {
pub name: String,
pub version: String
}
impl Display for Package {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{} {}", self.name, self.version)
}
} | pub struct UpdateRequest {
pub requestId: Uuid,
pub status: RequestStatus,
pub packageId: Package,
pub installPos: i32,
pub createdAt: String,
}
/// The current status of an `UpdateRequest`.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub enum RequestStatus {
Pending,
InFlight,
Canceled,
Failed,
Finished
}
/// A notification from RVI that a new update is available.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct UpdateAvailable {
pub update_id: String,
pub signature: String,
pub description: String,
pub request_confirmation: bool,
pub size: u64
}
/// A notification to an external package manager that the package was downloaded.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct DownloadComplete {
pub update_id: Uuid,
pub update_image: String,
pub signature: String
}
/// A notification to an external package manager that the package download failed.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct DownloadFailed {
pub update_id: Uuid,
pub reason: String
} |
/// A request for the device to install a new update.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
#[allow(non_snake_case)] | random_line_split |
download.rs | use std::fmt::{self, Display, Formatter};
use uuid::Uuid;
/// Details of a package for downloading.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct Package {
pub name: String,
pub version: String
}
impl Display for Package {
fn fmt(&self, f: &mut Formatter) -> fmt::Result |
}
/// A request for the device to install a new update.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
#[allow(non_snake_case)]
pub struct UpdateRequest {
pub requestId: Uuid,
pub status: RequestStatus,
pub packageId: Package,
pub installPos: i32,
pub createdAt: String,
}
/// The current status of an `UpdateRequest`.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub enum RequestStatus {
Pending,
InFlight,
Canceled,
Failed,
Finished
}
/// A notification from RVI that a new update is available.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct UpdateAvailable {
pub update_id: String,
pub signature: String,
pub description: String,
pub request_confirmation: bool,
pub size: u64
}
/// A notification to an external package manager that the package was downloaded.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct DownloadComplete {
pub update_id: Uuid,
pub update_image: String,
pub signature: String
}
/// A notification to an external package manager that the package download failed.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct DownloadFailed {
pub update_id: Uuid,
pub reason: String
}
| {
write!(f, "{} {}", self.name, self.version)
} | identifier_body |
download.rs | use std::fmt::{self, Display, Formatter};
use uuid::Uuid;
/// Details of a package for downloading.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct Package {
pub name: String,
pub version: String
}
impl Display for Package {
fn | (&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{} {}", self.name, self.version)
}
}
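// Display sketch (illustrative, following the write! call above): a Package with name "vim" and
// version "8.0" formats as "vim 8.0", e.g. via format!("{}", pkg); usage in log output is assumed.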
/// A request for the device to install a new update.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
#[allow(non_snake_case)]
pub struct UpdateRequest {
pub requestId: Uuid,
pub status: RequestStatus,
pub packageId: Package,
pub installPos: i32,
pub createdAt: String,
}
/// The current status of an `UpdateRequest`.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub enum RequestStatus {
Pending,
InFlight,
Canceled,
Failed,
Finished
}
/// A notification from RVI that a new update is available.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct UpdateAvailable {
pub update_id: String,
pub signature: String,
pub description: String,
pub request_confirmation: bool,
pub size: u64
}
/// A notification to an external package manager that the package was downloaded.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct DownloadComplete {
pub update_id: Uuid,
pub update_image: String,
pub signature: String
}
/// A notification to an external package manager that the package download failed.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct DownloadFailed {
pub update_id: Uuid,
pub reason: String
}
| fmt | identifier_name |
0002_auto_20191121_1640.py | # Generated by Django 2.2.7 on 2019-11-21 15:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('smmapdfs_edit', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='pdfsandwichemailconnector',
name='administrative_unit', | ),
migrations.AlterField(
model_name='pdfsandwichfontconnector',
name='administrative_unit',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='aklub.AdministrativeUnit'),
),
migrations.AlterField(
model_name='pdfsandwichtypeconnector',
name='administrative_unit',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='aklub.AdministrativeUnit'),
),
] | field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='aklub.AdministrativeUnit'), | random_line_split |
0002_auto_20191121_1640.py | # Generated by Django 2.2.7 on 2019-11-21 15:40
from django.db import migrations, models
import django.db.models.deletion
class | (migrations.Migration):
dependencies = [
('smmapdfs_edit', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='pdfsandwichemailconnector',
name='administrative_unit',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='aklub.AdministrativeUnit'),
),
migrations.AlterField(
model_name='pdfsandwichfontconnector',
name='administrative_unit',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='aklub.AdministrativeUnit'),
),
migrations.AlterField(
model_name='pdfsandwichtypeconnector',
name='administrative_unit',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='aklub.AdministrativeUnit'),
),
]
| Migration | identifier_name |
0002_auto_20191121_1640.py | # Generated by Django 2.2.7 on 2019-11-21 15:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
| ]
| dependencies = [
('smmapdfs_edit', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='pdfsandwichemailconnector',
name='administrative_unit',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='aklub.AdministrativeUnit'),
),
migrations.AlterField(
model_name='pdfsandwichfontconnector',
name='administrative_unit',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='aklub.AdministrativeUnit'),
),
migrations.AlterField(
model_name='pdfsandwichtypeconnector',
name='administrative_unit',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='aklub.AdministrativeUnit'),
), | identifier_body |
ConfigSet.py | lists
"""
import copy, re, os
from waflib import Logs, Utils
re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
class ConfigSet(object):
"""
A dict that honor serialization and parent relationships. The serialization format
is human-readable (python-like) and performed by using eval() and repr().
For high performance prefer pickle. Do not store functions as they are not serializable.
The values can be accessed by attributes or by keys::
from waflib.ConfigSet import ConfigSet
env = ConfigSet()
env.FOO = 'test'
env['FOO'] = 'test'
"""
__slots__ = ('table', 'parent')
def __init__(self, filename=None):
self.table = {}
"""
Internal dict holding the object values
"""
#self.parent = None
if filename:
self.load(filename)
def __contains__(self, key):
"""
Enable the *in* syntax::
if 'foo' in env:
print(env['foo'])
"""
if key in self.table: return True
try: return self.parent.__contains__(key)
except AttributeError: return False # parent may not exist
def keys(self):
"""Dict interface (unknown purpose)"""
keys = set()
cur = self
while cur:
keys.update(cur.table.keys())
cur = getattr(cur, 'parent', None)
keys = list(keys)
keys.sort()
return keys
def __str__(self):
"""Text representation of the ConfigSet (for debugging purposes)"""
return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()])
def __getitem__(self, key):
"""
Dictionary interface: get value from key::
def configure(conf):
conf.env['foo'] = {}
print(conf.env['foo'])
"""
try:
while 1:
x = self.table.get(key, None)
if not x is None:
return x
self = self.parent
except AttributeError:
return []
def __setitem__(self, key, value):
"""
Dictionary interface: set value for a key
"""
self.table[key] = value
def __delitem__(self, key):
"""
Dictionary interface: delete a key by resetting its value to an empty list
"""
self[key] = []
def __getattr__(self, name):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
conf.env.value
conf.env['value']
"""
if name in self.__slots__:
return object.__getattr__(self, name)
else:
return self[name]
def __setattr__(self, name, value):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
conf.env.value = x
env['value'] = x
"""
if name in self.__slots__:
object.__setattr__(self, name, value)
else:
self[name] = value
def __delattr__(self, name):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
del env.value
del env['value']
"""
if name in self.__slots__:
object.__delattr__(self, name)
else:
del self[name]
def derive(self):
"""
Returns a new ConfigSet deriving from self. The copy returned
will be a shallow copy::
from waflib.ConfigSet import ConfigSet
env = ConfigSet()
env.append_value('CFLAGS', ['-O2'])
child = env.derive()
child.CFLAGS.append('test') # warning! this will modify 'env'
child.CFLAGS = ['-O3'] # new list, ok
child.append_value('CFLAGS', ['-O3']) # ok
Use :py:func:`ConfigSet.detach` to detach the child from the parent.
"""
newenv = ConfigSet()
newenv.parent = self
return newenv
def detach(self):
"""
Detach self from its parent (if existing)
Modifying the parent :py:class:`ConfigSet` will not change the current object
Modifying this :py:class:`ConfigSet` will not modify the parent one.
"""
tbl = self.get_merged_dict()
try:
delattr(self, 'parent')
except AttributeError:
pass
else:
keys = tbl.keys()
for x in keys:
tbl[x] = copy.deepcopy(tbl[x])
self.table = tbl
return self
def get_flat(self, key):
"""
Return a value as a string. If the input is a list, the value returned is space-separated.
:param key: key to use
:type key: string
"""
s = self[key]
if isinstance(s, str): return s
return ' '.join(s)
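# Illustrative (assumed) usage:
#   env.CFLAGS = ['-O2', '-g']
#   env.get_flat('CFLAGS')  # -> '-O2 -g'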
def _get_list_value_for_modification(self, key):
"""
Return a list value for further modification.
The list may be modified inplace and there is no need to do this afterwards::
self.table[var] = value
"""
try:
value = self.table[key]
except KeyError:
try: value = self.parent[key]
except AttributeError: value = []
if isinstance(value, list):
value = value[:]
else:
value = [value]
else:
if not isinstance(value, list):
value = [value]
self.table[key] = value
return value
def append_value(self, var, val):
"""
Appends a value to the specified config key::
def build(bld):
bld.env.append_value('CFLAGS', ['-O2'])
The value must be a list or a tuple
"""
if isinstance(val, str): # if there were string everywhere we could optimize this
val = [val]
current_value = self._get_list_value_for_modification(var)
current_value.extend(val)
def prepend_value(self, var, val):
"""
Prepends a value to the specified item::
def configure(conf):
conf.env.prepend_value('CFLAGS', ['-O2'])
The value must be a list or a tuple
"""
if isinstance(val, str):
|
self.table[var] = val + self._get_list_value_for_modification(var)
def append_unique(self, var, val):
"""
Append a value to the specified item only if it's not already present::
def build(bld):
bld.env.append_unique('CFLAGS', ['-O2', '-g'])
The value must be a list or a tuple
"""
if isinstance(val, str):
val = [val]
current_value = self._get_list_value_for_modification(var)
for x in val:
if x not in current_value:
current_value.append(x)
def get_merged_dict(self):
"""
Compute the merged dictionary from the fusion of self and all its parent
:rtype: a ConfigSet object
"""
table_list = []
env = self
while 1:
table_list.insert(0, env.table)
try: env = env.parent
except AttributeError: break
merged_table = {}
for table in table_list:
merged_table.update(table)
return merged_table
def store(self, filename):
"""
Write the :py:class:`ConfigSet` data into a file. See :py:meth:`ConfigSet.load` for reading such files.
:param filename: file to use
:type filename: string
"""
try:
os.makedirs(os.path.split(filename)[0])
except OSError:
pass
buf = []
merged_table = self.get_merged_dict()
keys = list(merged_table.keys())
keys.sort()
try:
fun = ascii
except NameError:
fun = repr
for k in keys:
if k != 'undo_stack':
buf.append('%s = %s\n' % (k, fun(merged_table[k])))
Utils.writef(filename, ''.join(buf))
def load(self, filename):
"""
Retrieve the :py:class:`ConfigSet` data from a file. See :py:meth:`ConfigSet.store` for writing such files
:param filename: file to use
:type filename: string
"""
tbl = self.table
code = Utils.readf(filename, m='rU')
for m in re_imp.finditer(code):
g = m.group
tbl[g(2)] = eval(g(3))
Logs.debug('env: %s' % str(self.table))
def update(self, d):
"""
Dictionary interface: replace values from another dict
:param d: object to use the value from
:type d: dict-like object
"""
for k, v in d.items():
self[k] = v
def stash(self):
"""
Store the object state, to provide a kind of transaction support::
env = ConfigSet()
env.stash()
try:
env.append_value('CFLAGS', '-O3')
call_some_method(env)
finally:
| val = [val] | conditional_block |
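# --- Illustrative sketch (not a dataset row): behaviour of the append_value,
# --- prepend_value and append_unique helpers defined in ConfigSet.py above.
# --- Assumes waf's waflib package is importable (run inside a waf checkout).
from waflib.ConfigSet import ConfigSet

env = ConfigSet()
env.append_value('CFLAGS', ['-O2', '-g'])     # CFLAGS == ['-O2', '-g']
env.prepend_value('CFLAGS', ['-Wall'])        # CFLAGS == ['-Wall', '-O2', '-g']
env.append_unique('CFLAGS', ['-g', '-fPIC'])  # '-g' skipped, '-fPIC' appended
assert env['CFLAGS'] == ['-Wall', '-O2', '-g', '-fPIC']
# Plain strings are promoted to one-element lists by all three methods:
env.append_value('DEFINES', 'NDEBUG')
assert env['DEFINES'] == ['NDEBUG']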
ConfigSet.py | lists
"""
import copy, re, os
from waflib import Logs, Utils
re_imp = re.compile(r'^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
class ConfigSet(object):
"""
A dict that honors serialization and parent relationships. The serialization format
is human-readable (python-like) and performed by using eval() and repr().
For high performance prefer pickle. Do not store functions as they are not serializable.
The values can be accessed by attributes or by keys::
from waflib.ConfigSet import ConfigSet
env = ConfigSet()
env.FOO = 'test'
env['FOO'] = 'test'
"""
__slots__ = ('table', 'parent')
def __init__(self, filename=None):
self.table = {}
"""
Internal dict holding the object values
"""
#self.parent = None
if filename:
self.load(filename)
def __contains__(self, key):
"""
Enable the *in* syntax::
if 'foo' in env:
print(env['foo'])
"""
if key in self.table: return True
try: return self.parent.__contains__(key)
except AttributeError: return False # parent may not exist
def keys(self):
"""Dict interface (unknown purpose)"""
keys = set()
cur = self
while cur:
keys.update(cur.table.keys())
cur = getattr(cur, 'parent', None)
keys = list(keys)
keys.sort()
return keys
def __str__(self):
"""Text representation of the ConfigSet (for debugging purposes)"""
return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()])
def __getitem__(self, key):
"""
Dictionary interface: get value from key::
def configure(conf):
conf.env['foo'] = {}
print(env['foo'])
"""
try:
while 1:
x = self.table.get(key, None)
if not x is None:
return x
self = self.parent
except AttributeError:
return []
def __setitem__(self, key, value):
"""
Dictionary interface: set the value for a key
"""
self.table[key] = value
def __delitem__(self, key):
"""
Dictionary interface: delete a key by resetting its value to an empty list
"""
self[key] = []
def __getattr__(self, name):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
conf.env.value
conf.env['value']
"""
if name in self.__slots__:
return object.__getattr__(self, name)
else:
return self[name]
def __setattr__(self, name, value):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
conf.env.value = x
env['value'] = x
"""
if name in self.__slots__:
object.__setattr__(self, name, value)
else:
self[name] = value
def __delattr__(self, name):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
del env.value
del env['value']
"""
if name in self.__slots__:
object.__delattr__(self, name)
else:
del self[name]
def derive(self):
"""
Returns a new ConfigSet deriving from self. The copy returned
will be a shallow copy::
from waflib.ConfigSet import ConfigSet
env = ConfigSet()
env.append_value('CFLAGS', ['-O2'])
child = env.derive()
child.CFLAGS.append('test') # warning! this will modify 'env'
child.CFLAGS = ['-O3'] # new list, ok
child.append_value('CFLAGS', ['-O3']) # ok
Use :py:func:`ConfigSet.detach` to detach the child from the parent.
"""
newenv = ConfigSet()
newenv.parent = self
return newenv
def detach(self):
"""
Detach self from its parent (if existing)
Modifying the parent :py:class:`ConfigSet` will not change the current object
Modifying this :py:class:`ConfigSet` will not modify the parent one.
"""
tbl = self.get_merged_dict()
try:
delattr(self, 'parent')
except AttributeError:
pass
else:
keys = tbl.keys()
for x in keys:
tbl[x] = copy.deepcopy(tbl[x])
self.table = tbl
return self
def get_flat(self, key):
"""
Return a value as a string. If the input is a list, the value returned is space-separated.
:param key: key to use
:type key: string
"""
s = self[key]
if isinstance(s, str): return s
return ' '.join(s)
def _get_list_value_for_modification(self, key):
"""
Return a list value for further modification.
The list may be modified inplace and there is no need to do this afterwards::
self.table[var] = value
"""
try:
value = self.table[key]
except KeyError:
try: value = self.parent[key]
except AttributeError: value = []
if isinstance(value, list):
value = value[:]
else:
value = [value]
else:
if not isinstance(value, list):
value = [value]
self.table[key] = value
return value
def append_value(self, var, val):
"""
Appends a value to the specified config key::
def build(bld):
bld.env.append_value('CFLAGS', ['-O2'])
The value must be a list or a tuple
"""
if isinstance(val, str): # if strings were used everywhere we could optimize this
val = [val]
current_value = self._get_list_value_for_modification(var)
current_value.extend(val)
def prepend_value(self, var, val):
"""
Prepends a value to the specified item::
def configure(conf):
conf.env.prepend_value('CFLAGS', ['-O2'])
The value must be a list or a tuple
"""
if isinstance(val, str):
val = [val]
self.table[var] = val + self._get_list_value_for_modification(var)
def append_unique(self, var, val):
"""
Append a value to the specified item only if it's not already present::
def build(bld):
bld.env.append_unique('CFLAGS', ['-O2', '-g'])
The value must be a list or a tuple
"""
if isinstance(val, str):
val = [val]
current_value = self._get_list_value_for_modification(var)
for x in val:
if x not in current_value:
current_value.append(x)
def get_merged_dict(self):
|
def store(self, filename):
"""
Write the :py:class:`ConfigSet` data into a file. See :py:meth:`ConfigSet.load` for reading such files.
:param filename: file to use
:type filename: string
"""
try:
os.makedirs(os.path.split(filename)[0])
except OSError:
pass
buf = []
merged_table = self.get_merged_dict()
keys = list(merged_table.keys())
keys.sort()
try:
fun = ascii
except NameError:
fun = repr
for k in keys:
if k != 'undo_stack':
buf.append('%s = %s\n' % (k, fun(merged_table[k])))
Utils.writef(filename, ''.join(buf))
def load(self, filename):
"""
Retrieve the :py:class:`ConfigSet` data from a file. See :py:meth:`ConfigSet.store` for writing such files
:param filename: file to use
:type filename: string
"""
tbl = self.table
code = Utils.readf(filename, m='rU')
for m in re_imp.finditer(code):
g = m.group
tbl[g(2)] = eval(g(3))
Logs.debug('env: %s' % str(self.table))
def update(self, d):
"""
Dictionary interface: replace values from another dict
:param d: object to use the value from
:type d: dict-like object
"""
for k, v in d.items():
self[k] = v
def stash(self):
"""
Store the object state, to provide a kind of transaction support::
env = ConfigSet()
env.stash()
try:
env.append_value('CFLAGS', '-O3')
call_some_method(env)
finally:
env | """
Compute the merged dictionary from the fusion of self and all its parent
:rtype: a ConfigSet object
"""
table_list = []
env = self
while 1:
table_list.insert(0, env.table)
try: env = env.parent
except AttributeError: break
merged_table = {}
for table in table_list:
merged_table.update(table)
return merged_table | identifier_body |
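# --- Sketch (assumes waflib is importable): the derive()/detach() contract
# --- documented above. A derived ConfigSet only reads through to its parent;
# --- writing a key gives the child its own entry, and detach() deep-copies
# --- everything so later mutations no longer leak in either direction.
from waflib.ConfigSet import ConfigSet

parent = ConfigSet()
parent.append_value('CFLAGS', ['-O2'])
child = parent.derive()
assert child['CFLAGS'] == ['-O2']        # read falls through to the parent
child.append_value('CFLAGS', ['-g'])     # copy-on-write: parent is untouched
assert parent['CFLAGS'] == ['-O2']
child.detach()
parent.append_value('CFLAGS', ['-Wall']) # no longer visible to the child
assert child['CFLAGS'] == ['-O2', '-g']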
ConfigSet.py | lists
"""
import copy, re, os
from waflib import Logs, Utils
re_imp = re.compile(r'^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
class ConfigSet(object):
"""
A dict that honors serialization and parent relationships. The serialization format
is human-readable (python-like) and performed by using eval() and repr().
For high performance prefer pickle. Do not store functions as they are not serializable.
The values can be accessed by attributes or by keys::
from waflib.ConfigSet import ConfigSet
env = ConfigSet()
env.FOO = 'test'
env['FOO'] = 'test'
"""
__slots__ = ('table', 'parent')
def __init__(self, filename=None):
self.table = {}
"""
Internal dict holding the object values
"""
#self.parent = None
if filename:
self.load(filename)
def __contains__(self, key):
"""
Enable the *in* syntax::
if 'foo' in env:
print(env['foo'])
"""
if key in self.table: return True
try: return self.parent.__contains__(key)
except AttributeError: return False # parent may not exist
def keys(self):
"""Dict interface (unknown purpose)"""
keys = set()
cur = self
while cur:
keys.update(cur.table.keys())
cur = getattr(cur, 'parent', None)
keys = list(keys)
keys.sort()
return keys
def __str__(self):
"""Text representation of the ConfigSet (for debugging purposes)"""
return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()])
def __getitem__(self, key):
"""
Dictionary interface: get value from key::
def configure(conf):
conf.env['foo'] = {}
print(env['foo'])
"""
try:
while 1:
x = self.table.get(key, None)
if not x is None:
return x
self = self.parent
except AttributeError:
return []
def __setitem__(self, key, value):
"""
Dictionary interface: set the value for a key
"""
self.table[key] = value
def __delitem__(self, key):
"""
Dictionary interface: delete a key by resetting its value to an empty list
"""
self[key] = []
def __getattr__(self, name):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
conf.env.value
conf.env['value']
"""
if name in self.__slots__:
return object.__getattr__(self, name)
else:
return self[name]
def __setattr__(self, name, value):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
conf.env.value = x
env['value'] = x
"""
if name in self.__slots__:
object.__setattr__(self, name, value)
else:
self[name] = value
def __delattr__(self, name):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
del env.value
del env['value']
"""
if name in self.__slots__:
object.__delattr__(self, name)
else:
del self[name]
def derive(self):
"""
Returns a new ConfigSet deriving from self. The copy returned
will be a shallow copy::
from waflib.ConfigSet import ConfigSet
env = ConfigSet()
env.append_value('CFLAGS', ['-O2'])
child = env.derive()
child.CFLAGS.append('test') # warning! this will modify 'env'
child.CFLAGS = ['-O3'] # new list, ok
child.append_value('CFLAGS', ['-O3']) # ok
Use :py:func:`ConfigSet.detach` to detach the child from the parent.
"""
newenv = ConfigSet()
newenv.parent = self
return newenv
def detach(self):
"""
Detach self from its parent (if existing)
Modifying the parent :py:class:`ConfigSet` will not change the current object
Modifying this :py:class:`ConfigSet` will not modify the parent one.
"""
tbl = self.get_merged_dict()
try:
delattr(self, 'parent')
except AttributeError:
pass
else:
keys = tbl.keys()
for x in keys:
tbl[x] = copy.deepcopy(tbl[x])
self.table = tbl
return self
| Return a value as a string. If the input is a list, the value returned is space-separated.
:param key: key to use
:type key: string
"""
s = self[key]
if isinstance(s, str): return s
return ' '.join(s)
def _get_list_value_for_modification(self, key):
"""
Return a list value for further modification.
The list may be modified inplace and there is no need to do this afterwards::
self.table[var] = value
"""
try:
value = self.table[key]
except KeyError:
try: value = self.parent[key]
except AttributeError: value = []
if isinstance(value, list):
value = value[:]
else:
value = [value]
else:
if not isinstance(value, list):
value = [value]
self.table[key] = value
return value
def append_value(self, var, val):
"""
Appends a value to the specified config key::
def build(bld):
bld.env.append_value('CFLAGS', ['-O2'])
The value must be a list or a tuple
"""
if isinstance(val, str): # if strings were used everywhere we could optimize this
val = [val]
current_value = self._get_list_value_for_modification(var)
current_value.extend(val)
def prepend_value(self, var, val):
"""
Prepends a value to the specified item::
def configure(conf):
conf.env.prepend_value('CFLAGS', ['-O2'])
The value must be a list or a tuple
"""
if isinstance(val, str):
val = [val]
self.table[var] = val + self._get_list_value_for_modification(var)
def append_unique(self, var, val):
"""
Append a value to the specified item only if it's not already present::
def build(bld):
bld.env.append_unique('CFLAGS', ['-O2', '-g'])
The value must be a list or a tuple
"""
if isinstance(val, str):
val = [val]
current_value = self._get_list_value_for_modification(var)
for x in val:
if x not in current_value:
current_value.append(x)
def get_merged_dict(self):
"""
Compute the merged dictionary from the fusion of self and all its parent
:rtype: a ConfigSet object
"""
table_list = []
env = self
while 1:
table_list.insert(0, env.table)
try: env = env.parent
except AttributeError: break
merged_table = {}
for table in table_list:
merged_table.update(table)
return merged_table
def store(self, filename):
"""
Write the :py:class:`ConfigSet` data into a file. See :py:meth:`ConfigSet.load` for reading such files.
:param filename: file to use
:type filename: string
"""
try:
os.makedirs(os.path.split(filename)[0])
except OSError:
pass
buf = []
merged_table = self.get_merged_dict()
keys = list(merged_table.keys())
keys.sort()
try:
fun = ascii
except NameError:
fun = repr
for k in keys:
if k != 'undo_stack':
buf.append('%s = %s\n' % (k, fun(merged_table[k])))
Utils.writef(filename, ''.join(buf))
def load(self, filename):
"""
Retrieve the :py:class:`ConfigSet` data from a file. See :py:meth:`ConfigSet.store` for writing such files
:param filename: file to use
:type filename: string
"""
tbl = self.table
code = Utils.readf(filename, m='rU')
for m in re_imp.finditer(code):
g = m.group
tbl[g(2)] = eval(g(3))
Logs.debug('env: %s' % str(self.table))
def update(self, d):
"""
Dictionary interface: replace values from another dict
:param d: object to use the value from
:type d: dict-like object
"""
for k, v in d.items():
self[k] = v
def stash(self):
"""
Store the object state, to provide a kind of transaction support::
env = ConfigSet()
env.stash()
try:
env.append_value('CFLAGS', '-O3')
call_some_method(env)
finally:
| def get_flat(self, key):
""" | random_line_split |
ConfigSet.py | lists
"""
import copy, re, os
from waflib import Logs, Utils
re_imp = re.compile(r'^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
class ConfigSet(object):
"""
A dict that honors serialization and parent relationships. The serialization format
is human-readable (python-like) and performed by using eval() and repr().
For high performance prefer pickle. Do not store functions as they are not serializable.
The values can be accessed by attributes or by keys::
from waflib.ConfigSet import ConfigSet
env = ConfigSet()
env.FOO = 'test'
env['FOO'] = 'test'
"""
__slots__ = ('table', 'parent')
def __init__(self, filename=None):
self.table = {}
"""
Internal dict holding the object values
"""
#self.parent = None
if filename:
self.load(filename)
def __contains__(self, key):
"""
Enable the *in* syntax::
if 'foo' in env:
print(env['foo'])
"""
if key in self.table: return True
try: return self.parent.__contains__(key)
except AttributeError: return False # parent may not exist
def | (self):
"""Dict interface (unknown purpose)"""
keys = set()
cur = self
while cur:
keys.update(cur.table.keys())
cur = getattr(cur, 'parent', None)
keys = list(keys)
keys.sort()
return keys
def __str__(self):
"""Text representation of the ConfigSet (for debugging purposes)"""
return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()])
def __getitem__(self, key):
"""
Dictionary interface: get value from key::
def configure(conf):
conf.env['foo'] = {}
print(env['foo'])
"""
try:
while 1:
x = self.table.get(key, None)
if not x is None:
return x
self = self.parent
except AttributeError:
return []
def __setitem__(self, key, value):
"""
Dictionary interface: set the value for a key
"""
self.table[key] = value
def __delitem__(self, key):
"""
Dictionary interface: delete a key by resetting its value to an empty list
"""
self[key] = []
def __getattr__(self, name):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
conf.env.value
conf.env['value']
"""
if name in self.__slots__:
return object.__getattr__(self, name)
else:
return self[name]
def __setattr__(self, name, value):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
conf.env.value = x
env['value'] = x
"""
if name in self.__slots__:
object.__setattr__(self, name, value)
else:
self[name] = value
def __delattr__(self, name):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
del env.value
del env['value']
"""
if name in self.__slots__:
object.__delattr__(self, name)
else:
del self[name]
def derive(self):
"""
Returns a new ConfigSet deriving from self. The copy returned
will be a shallow copy::
from waflib.ConfigSet import ConfigSet
env = ConfigSet()
env.append_value('CFLAGS', ['-O2'])
child = env.derive()
child.CFLAGS.append('test') # warning! this will modify 'env'
child.CFLAGS = ['-O3'] # new list, ok
child.append_value('CFLAGS', ['-O3']) # ok
Use :py:func:`ConfigSet.detach` to detach the child from the parent.
"""
newenv = ConfigSet()
newenv.parent = self
return newenv
def detach(self):
"""
Detach self from its parent (if existing)
Modifying the parent :py:class:`ConfigSet` will not change the current object
Modifying this :py:class:`ConfigSet` will not modify the parent one.
"""
tbl = self.get_merged_dict()
try:
delattr(self, 'parent')
except AttributeError:
pass
else:
keys = tbl.keys()
for x in keys:
tbl[x] = copy.deepcopy(tbl[x])
self.table = tbl
return self
def get_flat(self, key):
"""
Return a value as a string. If the input is a list, the value returned is space-separated.
:param key: key to use
:type key: string
"""
s = self[key]
if isinstance(s, str): return s
return ' '.join(s)
def _get_list_value_for_modification(self, key):
"""
Return a list value for further modification.
The list may be modified inplace and there is no need to do this afterwards::
self.table[var] = value
"""
try:
value = self.table[key]
except KeyError:
try: value = self.parent[key]
except AttributeError: value = []
if isinstance(value, list):
value = value[:]
else:
value = [value]
else:
if not isinstance(value, list):
value = [value]
self.table[key] = value
return value
def append_value(self, var, val):
"""
Appends a value to the specified config key::
def build(bld):
bld.env.append_value('CFLAGS', ['-O2'])
The value must be a list or a tuple
"""
if isinstance(val, str): # if strings were used everywhere we could optimize this
val = [val]
current_value = self._get_list_value_for_modification(var)
current_value.extend(val)
def prepend_value(self, var, val):
"""
Prepends a value to the specified item::
def configure(conf):
conf.env.prepend_value('CFLAGS', ['-O2'])
The value must be a list or a tuple
"""
if isinstance(val, str):
val = [val]
self.table[var] = val + self._get_list_value_for_modification(var)
def append_unique(self, var, val):
"""
Append a value to the specified item only if it's not already present::
def build(bld):
bld.env.append_unique('CFLAGS', ['-O2', '-g'])
The value must be a list or a tuple
"""
if isinstance(val, str):
val = [val]
current_value = self._get_list_value_for_modification(var)
for x in val:
if x not in current_value:
current_value.append(x)
def get_merged_dict(self):
"""
Compute the merged dictionary from the fusion of self and all its parent
:rtype: a ConfigSet object
"""
table_list = []
env = self
while 1:
table_list.insert(0, env.table)
try: env = env.parent
except AttributeError: break
merged_table = {}
for table in table_list:
merged_table.update(table)
return merged_table
def store(self, filename):
"""
Write the :py:class:`ConfigSet` data into a file. See :py:meth:`ConfigSet.load` for reading such files.
:param filename: file to use
:type filename: string
"""
try:
os.makedirs(os.path.split(filename)[0])
except OSError:
pass
buf = []
merged_table = self.get_merged_dict()
keys = list(merged_table.keys())
keys.sort()
try:
fun = ascii
except NameError:
fun = repr
for k in keys:
if k != 'undo_stack':
buf.append('%s = %s\n' % (k, fun(merged_table[k])))
Utils.writef(filename, ''.join(buf))
def load(self, filename):
"""
Retrieve the :py:class:`ConfigSet` data from a file. See :py:meth:`ConfigSet.store` for writing such files
:param filename: file to use
:type filename: string
"""
tbl = self.table
code = Utils.readf(filename, m='rU')
for m in re_imp.finditer(code):
g = m.group
tbl[g(2)] = eval(g(3))
Logs.debug('env: %s' % str(self.table))
def update(self, d):
"""
Dictionary interface: replace values from another dict
:param d: object to use the value from
:type d: dict-like object
"""
for k, v in d.items():
self[k] = v
def stash(self):
"""
Store the object state, to provide a kind of transaction support::
env = ConfigSet()
env.stash()
try:
env.append_value('CFLAGS', '-O3')
call_some_method(env)
finally:
| keys | identifier_name |
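# --- Sketch (assumes waflib is importable): attribute access and key access
# --- are interchangeable on the ConfigSet shown above, and "deleting" a key
# --- merely resets it to an empty list, so lookups never raise KeyError.
from waflib.ConfigSet import ConfigSet

env = ConfigSet()
env.FOO = 'test'                 # same as env['FOO'] = 'test'
assert env['FOO'] == 'test'
del env['FOO']                   # __delitem__ assigns [] instead of removing
assert env.FOO == []             # missing keys also read back as []
assert env.NEVER_SET == []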
imgwin.rs | use glium;
use glium::index::PrimitiveType;
use glium::Surface;
use image;
use std::time::{Duration, Instant};
use glium::glutin::event::{ElementState, Event, StartCause, VirtualKeyCode, WindowEvent};
use glium::glutin::event_loop::{ControlFlow, EventLoop};
use glium::glutin::window::WindowBuilder;
use glium::glutin::ContextBuilder;
use glium::texture::{CompressedSrgbTexture2d, RawImage2d};
use glium::{implement_vertex, program, uniform};
#[derive(Copy, Clone)]
struct Vertex {
position: [f32; 2],
tex_coords: [f32; 2],
}
implement_vertex!(Vertex, position, tex_coords);
/// An Application creates windows and runs a main loop
pub struct Application {
main_loop: EventLoop<()>,
}
impl Application {
pub fn new() -> Application |
pub fn new_window(&self, title: impl Into<String>) -> ImgWindow {
ImgWindow::new(title, &self.main_loop)
}
/// Execute the main loop without ever returning. Events are delegated to the given `handler`
/// and `handler.next_frame` is called `fps` times per second.
/// Whenever `handler.should_exit` turns true, the program exits.
pub fn run<T: MainloopHandler + 'static>(self, mut handler: T, fps: u32) -> ! {
self.main_loop.run(move |event, _, control_flow| {
let now = Instant::now();
match event {
Event::WindowEvent {
event: win_event, ..
} => match win_event {
WindowEvent::CloseRequested => {
handler.close_event();
}
WindowEvent::KeyboardInput { input, .. }
if input.state == ElementState::Pressed =>
{
handler.key_event(input.virtual_keycode)
}
_ => (),
},
Event::NewEvents(StartCause::ResumeTimeReached { .. })
| Event::NewEvents(StartCause::Init) => handler.next_frame(),
_ => (),
}
if handler.should_exit() {
*control_flow = ControlFlow::Exit;
handler.on_exit();
} else {
*control_flow =
ControlFlow::WaitUntil(now + Duration::from_secs_f32(1f32 / fps as f32));
}
});
}
}
/// Shows an image with the help of OpenGL (glium)
pub struct ImgWindow {
texture: Option<CompressedSrgbTexture2d>,
pub facade: glium::Display,
vertex_buffer: glium::VertexBuffer<Vertex>,
index_buffer: glium::IndexBuffer<u16>,
program: glium::Program,
}
/// Implement this trait to handle events that occur in the main loop
/// and to control when the main loop exits.
pub trait MainloopHandler {
/// Get called whenever a window is closed.
fn close_event(&mut self);
/// Get called whenever a key is pressed.
fn key_event(&mut self, inp: Option<VirtualKeyCode>);
/// Should return true if the main loop should exit.
/// Get called after every other event.
fn should_exit(&self) -> bool;
/// Get called when the next frame should be drawn.
fn next_frame(&mut self);
/// Get called before the main loop ends
fn on_exit(&mut self);
}
impl ImgWindow {
fn new<T: Into<String>>(title: T, main_loop: &EventLoop<()>) -> ImgWindow {
let wb = WindowBuilder::new().with_title(title.into());
let cb = ContextBuilder::new().with_vsync(true);
let display = glium::Display::new(wb, cb, &main_loop).unwrap();
// vertices for a rectangle used to draw an image over the whole window
let vertex_buffer = glium::VertexBuffer::new(
&display,
&[
Vertex {
position: [-1.0, -1.0],
tex_coords: [0.0, 0.0],
},
Vertex {
position: [-1.0, 1.0],
tex_coords: [0.0, 1.0],
},
Vertex {
position: [1.0, 1.0],
tex_coords: [1.0, 1.0],
},
Vertex {
position: [1.0, -1.0],
tex_coords: [1.0, 0.0],
},
],
)
.unwrap();
let index_buffer =
glium::IndexBuffer::new(&display, PrimitiveType::TriangleStrip, &[1u16, 2, 0, 3])
.unwrap();
// just enough shader for drawing images
let program = program!(&display,
140 => {
vertex: "
#version 140
uniform lowp mat4 matrix;
in vec2 position;
in vec2 tex_coords;
out vec2 v_tex_coords;
void main(){
gl_Position = matrix * vec4(position, 0.0, 1.0);
v_tex_coords = tex_coords;
}
",
fragment: "
#version 140
uniform sampler2D tex;
in vec2 v_tex_coords;
out vec4 f_color;
void main(){
f_color = texture(tex, v_tex_coords);
}
"
},)
.unwrap();
ImgWindow {
texture: None,
facade: display,
vertex_buffer,
index_buffer,
program,
}
}
/// Changes the image which should be drawn to this window. Call `redraw` to show this image
/// to the user.
pub fn set_img(&mut self, img: image::RgbaImage) {
let dim = img.dimensions();
let text = RawImage2d::from_raw_rgba_reversed(&img.into_raw(), dim);
self.texture = CompressedSrgbTexture2d::new(&self.facade, text).ok();
}
/// Redraws using opengl
pub fn redraw(&self) {
let mut target = self.facade.draw();
target.clear_color(0.0, 0.0, 0.0, 0.0);
if let Some(ref texture) = self.texture {
let uniforms = uniform! {
matrix: [
[1.0, 0.0, 0.0, 0.0],
[0.0, 1.0, 0.0, 0.0],
[0.0, 0.0, 1.0, 0.0],
[0.0, 0.0, 0.0, 1.0f32]
],
tex: texture
};
target
.draw(
&self.vertex_buffer,
&self.index_buffer,
&self.program,
&uniforms,
&Default::default(),
)
.unwrap();
}
target.finish().unwrap();
// self.facade.swap_buffers().unwrap();
}
}
| {
Application {
main_loop: EventLoop::new(),
}
} | identifier_body |
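# --- Language-agnostic sketch (written in Python, like most of this dump) of
# --- the frame pacing used by Application::run above: handle events, call
# --- next_frame(), then wait until `now + 1/fps` unless the handler asked to
# --- exit. Names here are illustrative, not part of the Rust API.
import time

def run(handler, fps):
    frame = 1.0 / fps
    while not handler.should_exit():
        deadline = time.monotonic() + frame
        handler.next_frame()                      # ~fps calls per second
        time.sleep(max(0.0, deadline - time.monotonic()))
    handler.on_exit()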
imgwin.rs | use glium;
use glium::index::PrimitiveType;
use glium::Surface;
use image;
use std::time::{Duration, Instant};
use glium::glutin::event::{ElementState, Event, StartCause, VirtualKeyCode, WindowEvent};
use glium::glutin::event_loop::{ControlFlow, EventLoop};
use glium::glutin::window::WindowBuilder;
use glium::glutin::ContextBuilder;
use glium::texture::{CompressedSrgbTexture2d, RawImage2d};
use glium::{implement_vertex, program, uniform};
#[derive(Copy, Clone)]
struct Vertex {
position: [f32; 2],
tex_coords: [f32; 2],
}
implement_vertex!(Vertex, position, tex_coords);
/// An Application creates windows and runs a main loop
pub struct Application {
main_loop: EventLoop<()>,
}
impl Application {
pub fn new() -> Application {
Application {
main_loop: EventLoop::new(),
}
}
pub fn new_window(&self, title: impl Into<String>) -> ImgWindow {
ImgWindow::new(title, &self.main_loop)
}
/// Execute the main loop without ever returning. Events are delegated to the given `handler`
/// and `handler.next_frame` is called `fps` times per second.
/// Whenever `handler.should_exit` turns true, the program exits.
pub fn run<T: MainloopHandler + 'static>(self, mut handler: T, fps: u32) -> ! {
self.main_loop.run(move |event, _, control_flow| {
let now = Instant::now();
match event {
Event::WindowEvent {
event: win_event, ..
} => match win_event {
WindowEvent::CloseRequested => {
handler.close_event();
}
WindowEvent::KeyboardInput { input, .. }
if input.state == ElementState::Pressed =>
|
_ => (),
},
Event::NewEvents(StartCause::ResumeTimeReached { .. })
| Event::NewEvents(StartCause::Init) => handler.next_frame(),
_ => (),
}
if handler.should_exit() {
*control_flow = ControlFlow::Exit;
handler.on_exit();
} else {
*control_flow =
ControlFlow::WaitUntil(now + Duration::from_secs_f32(1f32 / fps as f32));
}
});
}
}
/// Shows an image with the help of OpenGL (glium)
pub struct ImgWindow {
texture: Option<CompressedSrgbTexture2d>,
pub facade: glium::Display,
vertex_buffer: glium::VertexBuffer<Vertex>,
index_buffer: glium::IndexBuffer<u16>,
program: glium::Program,
}
/// Implement this trait to handle events that occur in the main loop
/// and to control when the main loop exits.
pub trait MainloopHandler {
/// Get called whenever a window is closed.
fn close_event(&mut self);
/// Get called whenever a key is pressed.
fn key_event(&mut self, inp: Option<VirtualKeyCode>);
/// Should return true if the main loop should exit.
/// Get called after every other event.
fn should_exit(&self) -> bool;
/// Get called when the next frame should be drawn.
fn next_frame(&mut self);
/// Get called before the main loop ends
fn on_exit(&mut self);
}
impl ImgWindow {
fn new<T: Into<String>>(title: T, main_loop: &EventLoop<()>) -> ImgWindow {
let wb = WindowBuilder::new().with_title(title.into());
let cb = ContextBuilder::new().with_vsync(true);
let display = glium::Display::new(wb, cb, &main_loop).unwrap();
// vertices for a rectangle used to draw an image over the whole window
let vertex_buffer = glium::VertexBuffer::new(
&display,
&[
Vertex {
position: [-1.0, -1.0],
tex_coords: [0.0, 0.0],
},
Vertex {
position: [-1.0, 1.0],
tex_coords: [0.0, 1.0],
},
Vertex {
position: [1.0, 1.0],
tex_coords: [1.0, 1.0],
},
Vertex {
position: [1.0, -1.0],
tex_coords: [1.0, 0.0],
},
],
)
.unwrap();
let index_buffer =
glium::IndexBuffer::new(&display, PrimitiveType::TriangleStrip, &[1u16, 2, 0, 3])
.unwrap();
// just enough shader for drawing images
let program = program!(&display,
140 => {
vertex: "
#version 140
uniform lowp mat4 matrix;
in vec2 position;
in vec2 tex_coords;
out vec2 v_tex_coords;
void main(){
gl_Position = matrix * vec4(position, 0.0, 1.0);
v_tex_coords = tex_coords;
}
",
fragment: "
#version 140
uniform sampler2D tex;
in vec2 v_tex_coords;
out vec4 f_color;
void main(){
f_color = texture(tex, v_tex_coords);
}
"
},)
.unwrap();
ImgWindow {
texture: None,
facade: display,
vertex_buffer,
index_buffer,
program,
}
}
/// Changes the image which should be drawn to this window. Call `redraw` to show this image
/// to the user.
pub fn set_img(&mut self, img: image::RgbaImage) {
let dim = img.dimensions();
let text = RawImage2d::from_raw_rgba_reversed(&img.into_raw(), dim);
self.texture = CompressedSrgbTexture2d::new(&self.facade, text).ok();
}
/// Redraws using opengl
pub fn redraw(&self) {
let mut target = self.facade.draw();
target.clear_color(0.0, 0.0, 0.0, 0.0);
if let Some(ref texture) = self.texture {
let uniforms = uniform! {
matrix: [
[1.0, 0.0, 0.0, 0.0],
[0.0, 1.0, 0.0, 0.0],
[0.0, 0.0, 1.0, 0.0],
[0.0, 0.0, 0.0, 1.0f32]
],
tex: texture
};
target
.draw(
&self.vertex_buffer,
&self.index_buffer,
&self.program,
&uniforms,
&Default::default(),
)
.unwrap();
}
target.finish().unwrap();
// self.facade.swap_buffers().unwrap();
}
}
| {
handler.key_event(input.virtual_keycode)
} | conditional_block |
imgwin.rs | use glium;
use glium::index::PrimitiveType;
use glium::Surface;
use image;
use std::time::{Duration, Instant};
use glium::glutin::event::{ElementState, Event, StartCause, VirtualKeyCode, WindowEvent};
use glium::glutin::event_loop::{ControlFlow, EventLoop};
use glium::glutin::window::WindowBuilder;
use glium::glutin::ContextBuilder; | struct Vertex {
position: [f32; 2],
tex_coords: [f32; 2],
}
implement_vertex!(Vertex, position, tex_coords);
/// An Application creates windows and runs a main loop
pub struct Application {
main_loop: EventLoop<()>,
}
impl Application {
pub fn new() -> Application {
Application {
main_loop: EventLoop::new(),
}
}
pub fn new_window(&self, title: impl Into<String>) -> ImgWindow {
ImgWindow::new(title, &self.main_loop)
}
/// Execute the main loop without ever returning. Events are delegated to the given `handler`
/// and `handler.next_frame` is called `fps` times per second.
/// Whenever `handler.should_exit` turns true, the program exits.
pub fn run<T: MainloopHandler + 'static>(self, mut handler: T, fps: u32) -> ! {
self.main_loop.run(move |event, _, control_flow| {
let now = Instant::now();
match event {
Event::WindowEvent {
event: win_event, ..
} => match win_event {
WindowEvent::CloseRequested => {
handler.close_event();
}
WindowEvent::KeyboardInput { input, .. }
if input.state == ElementState::Pressed =>
{
handler.key_event(input.virtual_keycode)
}
_ => (),
},
Event::NewEvents(StartCause::ResumeTimeReached { .. })
| Event::NewEvents(StartCause::Init) => handler.next_frame(),
_ => (),
}
if handler.should_exit() {
*control_flow = ControlFlow::Exit;
handler.on_exit();
} else {
*control_flow =
ControlFlow::WaitUntil(now + Duration::from_secs_f32(1f32 / fps as f32));
}
});
}
}
/// Shows an image with the help of OpenGL (glium)
pub struct ImgWindow {
texture: Option<CompressedSrgbTexture2d>,
pub facade: glium::Display,
vertex_buffer: glium::VertexBuffer<Vertex>,
index_buffer: glium::IndexBuffer<u16>,
program: glium::Program,
}
/// Implement this trait to handle events that occur in the main loop
/// and to control when the main loop exits.
pub trait MainloopHandler {
/// Get called whenever a window is closed.
fn close_event(&mut self);
/// Get called whenever a key is pressed.
fn key_event(&mut self, inp: Option<VirtualKeyCode>);
/// Should return true if the main loop should exit.
/// Get called after every other event.
fn should_exit(&self) -> bool;
/// Get called when the next frame should be drawn.
fn next_frame(&mut self);
/// Get called before the main loop ends
fn on_exit(&mut self);
}
impl ImgWindow {
fn new<T: Into<String>>(title: T, main_loop: &EventLoop<()>) -> ImgWindow {
let wb = WindowBuilder::new().with_title(title.into());
let cb = ContextBuilder::new().with_vsync(true);
let display = glium::Display::new(wb, cb, &main_loop).unwrap();
// vertices for a rectangle used to draw an image over the whole window
let vertex_buffer = glium::VertexBuffer::new(
&display,
&[
Vertex {
position: [-1.0, -1.0],
tex_coords: [0.0, 0.0],
},
Vertex {
position: [-1.0, 1.0],
tex_coords: [0.0, 1.0],
},
Vertex {
position: [1.0, 1.0],
tex_coords: [1.0, 1.0],
},
Vertex {
position: [1.0, -1.0],
tex_coords: [1.0, 0.0],
},
],
)
.unwrap();
let index_buffer =
glium::IndexBuffer::new(&display, PrimitiveType::TriangleStrip, &[1u16, 2, 0, 3])
.unwrap();
// just enough shader for drawing images
let program = program!(&display,
140 => {
vertex: "
#version 140
uniform lowp mat4 matrix;
in vec2 position;
in vec2 tex_coords;
out vec2 v_tex_coords;
void main(){
gl_Position = matrix * vec4(position, 0.0, 1.0);
v_tex_coords = tex_coords;
}
",
fragment: "
#version 140
uniform sampler2D tex;
in vec2 v_tex_coords;
out vec4 f_color;
void main(){
f_color = texture(tex, v_tex_coords);
}
"
},)
.unwrap();
ImgWindow {
texture: None,
facade: display,
vertex_buffer,
index_buffer,
program,
}
}
/// Changes the image which should be drawn to this window. Call `redraw` to show this image
/// to the user.
pub fn set_img(&mut self, img: image::RgbaImage) {
let dim = img.dimensions();
let text = RawImage2d::from_raw_rgba_reversed(&img.into_raw(), dim);
self.texture = CompressedSrgbTexture2d::new(&self.facade, text).ok();
}
/// Redraws using opengl
pub fn redraw(&self) {
let mut target = self.facade.draw();
target.clear_color(0.0, 0.0, 0.0, 0.0);
if let Some(ref texture) = self.texture {
let uniforms = uniform! {
matrix: [
[1.0, 0.0, 0.0, 0.0],
[0.0, 1.0, 0.0, 0.0],
[0.0, 0.0, 1.0, 0.0],
[0.0, 0.0, 0.0, 1.0f32]
],
tex: texture
};
target
.draw(
&self.vertex_buffer,
&self.index_buffer,
&self.program,
&uniforms,
&Default::default(),
)
.unwrap();
}
target.finish().unwrap();
// self.facade.swap_buffers().unwrap();
}
} | use glium::texture::{CompressedSrgbTexture2d, RawImage2d};
use glium::{implement_vertex, program, uniform};
#[derive(Copy, Clone)] | random_line_split |
imgwin.rs | use glium;
use glium::index::PrimitiveType;
use glium::Surface;
use image;
use std::time::{Duration, Instant};
use glium::glutin::event::{ElementState, Event, StartCause, VirtualKeyCode, WindowEvent};
use glium::glutin::event_loop::{ControlFlow, EventLoop};
use glium::glutin::window::WindowBuilder;
use glium::glutin::ContextBuilder;
use glium::texture::{CompressedSrgbTexture2d, RawImage2d};
use glium::{implement_vertex, program, uniform};
#[derive(Copy, Clone)]
struct Vertex {
position: [f32; 2],
tex_coords: [f32; 2],
}
implement_vertex!(Vertex, position, tex_coords);
/// An Application creates windows and runs a main loop
pub struct Application {
main_loop: EventLoop<()>,
}
impl Application {
pub fn new() -> Application {
Application {
main_loop: EventLoop::new(),
}
}
pub fn new_window(&self, title: impl Into<String>) -> ImgWindow {
ImgWindow::new(title, &self.main_loop)
}
/// Execute the main loop without ever returning. Events are delegated to the given `handler`
/// and `handler.next_frame` is called `fps` times per second.
/// Whenever `handler.should_exit` turns true, the program exits.
pub fn run<T: MainloopHandler + 'static>(self, mut handler: T, fps: u32) -> ! {
self.main_loop.run(move |event, _, control_flow| {
let now = Instant::now();
match event {
Event::WindowEvent {
event: win_event, ..
} => match win_event {
WindowEvent::CloseRequested => {
handler.close_event();
}
WindowEvent::KeyboardInput { input, .. }
if input.state == ElementState::Pressed =>
{
handler.key_event(input.virtual_keycode)
}
_ => (),
},
Event::NewEvents(StartCause::ResumeTimeReached { .. })
| Event::NewEvents(StartCause::Init) => handler.next_frame(),
_ => (),
}
if handler.should_exit() {
*control_flow = ControlFlow::Exit;
handler.on_exit();
} else {
*control_flow =
ControlFlow::WaitUntil(now + Duration::from_secs_f32(1f32 / fps as f32));
}
});
}
}
/// Shows an image with the help of OpenGL (glium)
pub struct ImgWindow {
texture: Option<CompressedSrgbTexture2d>,
pub facade: glium::Display,
vertex_buffer: glium::VertexBuffer<Vertex>,
index_buffer: glium::IndexBuffer<u16>,
program: glium::Program,
}
/// Implement this trait to handle events that occur in the main loop
/// and to control when the main loop exits.
pub trait MainloopHandler {
/// Get called whenever a window is closed.
fn close_event(&mut self);
/// Get called whenever a key is pressed.
fn key_event(&mut self, inp: Option<VirtualKeyCode>);
/// Should return true if the main loop should exit.
/// Get called after every other event.
fn should_exit(&self) -> bool;
/// Get called when the next frame should be drawn.
fn next_frame(&mut self);
/// Get called before the main loop ends
fn on_exit(&mut self);
}
impl ImgWindow {
fn new<T: Into<String>>(title: T, main_loop: &EventLoop<()>) -> ImgWindow {
let wb = WindowBuilder::new().with_title(title.into());
let cb = ContextBuilder::new().with_vsync(true);
let display = glium::Display::new(wb, cb, &main_loop).unwrap();
// vertices for a rectangle used to draw an image over the whole window
let vertex_buffer = glium::VertexBuffer::new(
&display,
&[
Vertex {
position: [-1.0, -1.0],
tex_coords: [0.0, 0.0],
},
Vertex {
position: [-1.0, 1.0],
tex_coords: [0.0, 1.0],
},
Vertex {
position: [1.0, 1.0],
tex_coords: [1.0, 1.0],
},
Vertex {
position: [1.0, -1.0],
tex_coords: [1.0, 0.0],
},
],
)
.unwrap();
let index_buffer =
glium::IndexBuffer::new(&display, PrimitiveType::TriangleStrip, &[1u16, 2, 0, 3])
.unwrap();
// just enough shader for drawing images
let program = program!(&display,
140 => {
vertex: "
#version 140
uniform lowp mat4 matrix;
in vec2 position;
in vec2 tex_coords;
out vec2 v_tex_coords;
void main(){
gl_Position = matrix * vec4(position, 0.0, 1.0);
v_tex_coords = tex_coords;
}
",
fragment: "
#version 140
uniform sampler2D tex;
in vec2 v_tex_coords;
out vec4 f_color;
void main(){
f_color = texture(tex, v_tex_coords);
}
"
},)
.unwrap();
ImgWindow {
texture: None,
facade: display,
vertex_buffer,
index_buffer,
program,
}
}
/// Changes the image which should be drawn to this window. Call `redraw` to show this image
/// to the user.
pub fn set_img(&mut self, img: image::RgbaImage) {
let dim = img.dimensions();
let text = RawImage2d::from_raw_rgba_reversed(&img.into_raw(), dim);
self.texture = CompressedSrgbTexture2d::new(&self.facade, text).ok();
}
/// Redraws using opengl
pub fn | (&self) {
let mut target = self.facade.draw();
target.clear_color(0.0, 0.0, 0.0, 0.0);
if let Some(ref texture) = self.texture {
let uniforms = uniform! {
matrix: [
[1.0, 0.0, 0.0, 0.0],
[0.0, 1.0, 0.0, 0.0],
[0.0, 0.0, 1.0, 0.0],
[0.0, 0.0, 0.0, 1.0f32]
],
tex: texture
};
target
.draw(
&self.vertex_buffer,
&self.index_buffer,
&self.program,
&uniforms,
&Default::default(),
)
.unwrap();
}
target.finish().unwrap();
// self.facade.swap_buffers().unwrap();
}
}
| redraw | identifier_name |
router.js | 'use strict'
require('./controllers/listCtrl.js');
require('./controllers/loginCtrl.js');
require('./services/pageService.js');
angular.module('app.router',
['ui.router', 'app.list', 'app.login'])
.config(configFn);
configFn.$inject = ['$locationProvider', '$stateProvider', '$urlRouterProvider'];
function configFn($locationProvider, $stateProvider, $urlRouterProvider){
$urlRouterProvider.when('', '/');
$urlRouterProvider.otherwise("/404");
$stateProvider
.state('list', {
url: "/",
template: require('ng-cache!./views/list.html'),
// controller: 'listCtrl'
})
.state('signin', {
url: "/login",
template: require('ng-cache!./views/login.html'),
// controller: 'loginCtrl'
})
.state('404', {
url: "/404",
template: require('ng-cache!./views/404.html'),
controller: function(pageService) {
pageService.setTitle('404');
} | } | }); | random_line_split |
router.js | 'use strict'
require('./controllers/listCtrl.js');
require('./controllers/loginCtrl.js');
require('./services/pageService.js');
angular.module('app.router',
['ui.router', 'app.list', 'app.login'])
.config(configFn);
configFn.$inject = ['$locationProvider', '$stateProvider', '$urlRouterProvider'];
function configFn($locationProvider, $stateProvider, $urlRouterProvider) | controller: function(pageService) {
pageService.setTitle('404');
}
});
}
| {
$urlRouterProvider.when('', '/');
$urlRouterProvider.otherwise("/404");
$stateProvider
.state('list', {
url: "/",
template: require('ng-cache!./views/list.html'),
// controller: 'listCtrl'
})
.state('signin', {
url: "/login",
template: require('ng-cache!./views/login.html'),
// controller: 'loginCtrl'
})
.state('404', {
url: "/404",
template: require('ng-cache!./views/404.html'), | identifier_body |
router.js | 'use strict'
require('./controllers/listCtrl.js');
require('./controllers/loginCtrl.js');
require('./services/pageService.js');
angular.module('app.router',
['ui.router', 'app.list', 'app.login'])
.config(configFn);
configFn.$inject = ['$locationProvider', '$stateProvider', '$urlRouterProvider'];
function | ($locationProvider, $stateProvider, $urlRouterProvider){
$urlRouterProvider.when('', '/');
$urlRouterProvider.otherwise("/404");
$stateProvider
.state('list', {
url: "/",
template: require('ng-cache!./views/list.html'),
// controller: 'listCtrl'
})
.state('signin', {
url: "/login",
template: require('ng-cache!./views/login.html'),
// controller: 'loginCtrl'
})
.state('404', {
url: "/404",
template: require('ng-cache!./views/404.html'),
controller: function(pageService) {
pageService.setTitle('404');
}
});
} | configFn | identifier_name |
common.py | """
Define common steps for instructor dashboard acceptance tests.
"""
# pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
from __future__ import absolute_import
from lettuce import step, world
from mock import patch
from nose.tools import assert_in
from courseware.tests.factories import InstructorFactory, StaffFactory
@step(u'Given I am "([^"]*)" for a very large course')
def make_staff_or_instructor_for_large_course(step, role):
make_large_course(step, role)
@patch.dict('courseware.access.settings.FEATURES', {"MAX_ENROLLMENT_INSTR_BUTTONS": 0})
def make_large_course(step, role):
i_am_staff_or_instructor(step, role)
@step(u'Given I am "([^"]*)" for a course')
def i_am_staff_or_instructor(step, role): # pylint: disable=unused-argument
## In summary: makes a test course, makes a new Staff or Instructor user
## (depending on `role`), and logs that user in to the course
# Store the role
assert_in(role, ['instructor', 'staff'])
# Clear existing courses to avoid conflicts
world.clear_courses()
# Create a new course
course = world.CourseFactory.create(
org='edx',
number='999',
display_name='Test Course'
)
world.course_key = course.id
world.role = 'instructor'
# Log in as an instructor or staff member for the course
if role == 'instructor':
# Make & register an instructor for the course
world.instructor = InstructorFactory(course_key=world.course_key)
world.enroll_user(world.instructor, world.course_key)
world.log_in(
username=world.instructor.username,
password='test',
email=world.instructor.email,
name=world.instructor.profile.name
)
else:
world.role = 'staff'
# Make & register a staff member
world.staff = StaffFactory(course_key=world.course_key)
world.enroll_user(world.staff, world.course_key)
world.log_in(
username=world.staff.username,
password='test',
email=world.staff.email,
name=world.staff.profile.name
)
def go_to_section(section_name):
# section name should be one of
# course_info, membership, student_admin, data_download, analytics, send_email
world.visit(u'/courses/{}'.format(world.course_key))
world.css_click(u'a[href="/courses/{}/instructor"]'.format(world.course_key))
world.css_click('[data-section="{0}"]'.format(section_name))
@step(u'I click "([^"]*)"')
def click_a_button(step, button): # pylint: disable=unused-argument
if button == "Generate Grade Report":
# Go to the data download section of the instructor dash
go_to_section("data_download")
# Click generate grade report button
world.css_click('input[name="calculate-grades-csv"]')
# Expect to see a message that grade report is being generated
expected_msg = "The grade report is being created." \
" To view the status of the report, see" \
" Pending Tasks below."
world.wait_for_visible('#report-request-response')
assert_in(
expected_msg, world.css_text('#report-request-response'), | )
elif button == "Grading Configuration":
# Go to the data download section of the instructor dash
go_to_section("data_download")
world.css_click('input[name="dump-gradeconf"]')
elif button == "List enrolled students' profile information":
# Go to the data download section of the instructor dash
go_to_section("data_download")
world.css_click('input[name="list-profiles"]')
elif button == "Download profile information as a CSV":
# Go to the data download section of the instructor dash
go_to_section("data_download")
world.css_click('input[name="list-profiles-csv"]')
else:
raise ValueError("Unrecognized button option " + button)
@step(u'I visit the "([^"]*)" tab')
def click_a_button(step, tab_name): # pylint: disable=unused-argument
# course_info, membership, student_admin, data_download, analytics, send_email
tab_name_dict = {
'Course Info': 'course_info',
'Membership': 'membership',
'Student Admin': 'student_admin',
'Data Download': 'data_download',
'Analytics': 'analytics',
'Email': 'send_email',
}
go_to_section(tab_name_dict[tab_name]) | msg="Could not find grade report generation success message." | random_line_split |
common.py | """
Define common steps for instructor dashboard acceptance tests.
"""
# pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
from __future__ import absolute_import
from lettuce import step, world
from mock import patch
from nose.tools import assert_in
from courseware.tests.factories import InstructorFactory, StaffFactory
@step(u'Given I am "([^"]*)" for a very large course')
def make_staff_or_instructor_for_large_course(step, role):
make_large_course(step, role)
@patch.dict('courseware.access.settings.FEATURES', {"MAX_ENROLLMENT_INSTR_BUTTONS": 0})
def make_large_course(step, role):
i_am_staff_or_instructor(step, role)
@step(u'Given I am "([^"]*)" for a course')
def i_am_staff_or_instructor(step, role): # pylint: disable=unused-argument
## In summary: makes a test course, makes a new Staff or Instructor user
## (depending on `role`), and logs that user in to the course
# Store the role
assert_in(role, ['instructor', 'staff'])
# Clear existing courses to avoid conflicts
world.clear_courses()
# Create a new course
course = world.CourseFactory.create(
org='edx',
number='999',
display_name='Test Course'
)
world.course_key = course.id
world.role = 'instructor'
# Log in as an instructor or staff member for the course
if role == 'instructor':
# Make & register an instructor for the course
world.instructor = InstructorFactory(course_key=world.course_key)
world.enroll_user(world.instructor, world.course_key)
world.log_in(
username=world.instructor.username,
password='test',
email=world.instructor.email,
name=world.instructor.profile.name
)
else:
world.role = 'staff'
# Make & register a staff member
world.staff = StaffFactory(course_key=world.course_key)
world.enroll_user(world.staff, world.course_key)
world.log_in(
username=world.staff.username,
password='test',
email=world.staff.email,
name=world.staff.profile.name
)
def go_to_section(section_name):
# section name should be one of
# course_info, membership, student_admin, data_download, analytics, send_email
world.visit(u'/courses/{}'.format(world.course_key))
world.css_click(u'a[href="/courses/{}/instructor"]'.format(world.course_key))
world.css_click('[data-section="{0}"]'.format(section_name))
@step(u'I click "([^"]*)"')
def click_a_button(step, button): # pylint: disable=unused-argument
if button == "Generate Grade Report":
# Go to the data download section of the instructor dash
go_to_section("data_download")
# Click generate grade report button
world.css_click('input[name="calculate-grades-csv"]')
# Expect to see a message that grade report is being generated
expected_msg = "The grade report is being created." \
" To view the status of the report, see" \
" Pending Tasks below."
world.wait_for_visible('#report-request-response')
assert_in(
expected_msg, world.css_text('#report-request-response'),
msg="Could not find grade report generation success message."
)
elif button == "Grading Configuration":
# Go to the data download section of the instructor dash
|
elif button == "List enrolled students' profile information":
# Go to the data download section of the instructor dash
go_to_section("data_download")
world.css_click('input[name="list-profiles"]')
elif button == "Download profile information as a CSV":
# Go to the data download section of the instructor dash
go_to_section("data_download")
world.css_click('input[name="list-profiles-csv"]')
else:
raise ValueError("Unrecognized button option " + button)
@step(u'I visit the "([^"]*)" tab')
def click_a_button(step, tab_name): # pylint: disable=unused-argument
# course_info, membership, student_admin, data_download, analytics, send_email
tab_name_dict = {
'Course Info': 'course_info',
'Membership': 'membership',
'Student Admin': 'student_admin',
'Data Download': 'data_download',
'Analytics': 'analytics',
'Email': 'send_email',
}
go_to_section(tab_name_dict[tab_name])
| go_to_section("data_download")
world.css_click('input[name="dump-gradeconf"]') | conditional_block |
common.py | """
Define common steps for instructor dashboard acceptance tests.
"""
# pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
from __future__ import absolute_import
from lettuce import step, world
from mock import patch
from nose.tools import assert_in
from courseware.tests.factories import InstructorFactory, StaffFactory
@step(u'Given I am "([^"]*)" for a very large course')
def make_staff_or_instructor_for_large_course(step, role):
|
@patch.dict('courseware.access.settings.FEATURES', {"MAX_ENROLLMENT_INSTR_BUTTONS": 0})
def make_large_course(step, role):
i_am_staff_or_instructor(step, role)
@step(u'Given I am "([^"]*)" for a course')
def i_am_staff_or_instructor(step, role): # pylint: disable=unused-argument
## In summary: makes a test course, makes a new Staff or Instructor user
## (depending on `role`), and logs that user in to the course
# Store the role
assert_in(role, ['instructor', 'staff'])
# Clear existing courses to avoid conflicts
world.clear_courses()
# Create a new course
course = world.CourseFactory.create(
org='edx',
number='999',
display_name='Test Course'
)
world.course_key = course.id
world.role = 'instructor'
# Log in as an instructor or staff member for the course
if role == 'instructor':
# Make & register an instructor for the course
world.instructor = InstructorFactory(course_key=world.course_key)
world.enroll_user(world.instructor, world.course_key)
world.log_in(
username=world.instructor.username,
password='test',
email=world.instructor.email,
name=world.instructor.profile.name
)
else:
world.role = 'staff'
# Make & register a staff member
world.staff = StaffFactory(course_key=world.course_key)
world.enroll_user(world.staff, world.course_key)
world.log_in(
username=world.staff.username,
password='test',
email=world.staff.email,
name=world.staff.profile.name
)
def go_to_section(section_name):
# section name should be one of
# course_info, membership, student_admin, data_download, analytics, send_email
world.visit(u'/courses/{}'.format(world.course_key))
world.css_click(u'a[href="/courses/{}/instructor"]'.format(world.course_key))
world.css_click('[data-section="{0}"]'.format(section_name))
@step(u'I click "([^"]*)"')
def click_a_button(step, button): # pylint: disable=unused-argument
if button == "Generate Grade Report":
# Go to the data download section of the instructor dash
go_to_section("data_download")
# Click generate grade report button
world.css_click('input[name="calculate-grades-csv"]')
# Expect to see a message that grade report is being generated
expected_msg = "The grade report is being created." \
" To view the status of the report, see" \
" Pending Tasks below."
world.wait_for_visible('#report-request-response')
assert_in(
expected_msg, world.css_text('#report-request-response'),
msg="Could not find grade report generation success message."
)
elif button == "Grading Configuration":
# Go to the data download section of the instructor dash
go_to_section("data_download")
world.css_click('input[name="dump-gradeconf"]')
elif button == "List enrolled students' profile information":
# Go to the data download section of the instructor dash
go_to_section("data_download")
world.css_click('input[name="list-profiles"]')
elif button == "Download profile information as a CSV":
# Go to the data download section of the instructor dash
go_to_section("data_download")
world.css_click('input[name="list-profiles-csv"]')
else:
raise ValueError("Unrecognized button option " + button)
@step(u'I visit the "([^"]*)" tab')
def click_a_button(step, tab_name): # pylint: disable=unused-argument
# course_info, membership, student_admin, data_download, analytics, send_email
tab_name_dict = {
'Course Info': 'course_info',
'Membership': 'membership',
'Student Admin': 'student_admin',
'Data Download': 'data_download',
'Analytics': 'analytics',
'Email': 'send_email',
}
go_to_section(tab_name_dict[tab_name])
| make_large_course(step, role) | identifier_body |
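# --- Sketch: what the @patch.dict decorator on make_large_course does - the
# --- FEATURES dict is temporarily updated for the duration of the call, which
# --- is how the test forces the "very large course" path (enrollment cap 0).
# --- Standalone illustration with a local stand-in dict.
from unittest.mock import patch

FEATURES = {"MAX_ENROLLMENT_INSTR_BUTTONS": 9999}

@patch.dict(FEATURES, {"MAX_ENROLLMENT_INSTR_BUTTONS": 0})
def pretend_course_is_large():
    assert FEATURES["MAX_ENROLLMENT_INSTR_BUTTONS"] == 0

pretend_course_is_large()
assert FEATURES["MAX_ENROLLMENT_INSTR_BUTTONS"] == 9999  # restored afterwards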
common.py | """
Define common steps for instructor dashboard acceptance tests.
"""
# pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
from __future__ import absolute_import
from lettuce import step, world
from mock import patch
from nose.tools import assert_in
from courseware.tests.factories import InstructorFactory, StaffFactory
@step(u'Given I am "([^"]*)" for a very large course')
def make_staff_or_instructor_for_large_course(step, role):
make_large_course(step, role)
@patch.dict('courseware.access.settings.FEATURES', {"MAX_ENROLLMENT_INSTR_BUTTONS": 0})
def make_large_course(step, role):
i_am_staff_or_instructor(step, role)
@step(u'Given I am "([^"]*)" for a course')
def i_am_staff_or_instructor(step, role): # pylint: disable=unused-argument
## In summary: makes a test course, makes a new Staff or Instructor user
## (depending on `role`), and logs that user in to the course
# Store the role
assert_in(role, ['instructor', 'staff'])
# Clear existing courses to avoid conflicts
world.clear_courses()
# Create a new course
course = world.CourseFactory.create(
org='edx',
number='999',
display_name='Test Course'
)
world.course_key = course.id
world.role = 'instructor'
# Log in as an instructor or staff member for the course
if role == 'instructor':
# Make & register an instructor for the course
world.instructor = InstructorFactory(course_key=world.course_key)
world.enroll_user(world.instructor, world.course_key)
world.log_in(
username=world.instructor.username,
password='test',
email=world.instructor.email,
name=world.instructor.profile.name
)
else:
world.role = 'staff'
# Make & register a staff member
world.staff = StaffFactory(course_key=world.course_key)
world.enroll_user(world.staff, world.course_key)
world.log_in(
username=world.staff.username,
password='test',
email=world.staff.email,
name=world.staff.profile.name
)
def | (section_name):
# section name should be one of
# course_info, membership, student_admin, data_download, analytics, send_email
world.visit(u'/courses/{}'.format(world.course_key))
world.css_click(u'a[href="/courses/{}/instructor"]'.format(world.course_key))
world.css_click('[data-section="{0}"]'.format(section_name))
@step(u'I click "([^"]*)"')
def click_a_button(step, button): # pylint: disable=unused-argument
if button == "Generate Grade Report":
# Go to the data download section of the instructor dash
go_to_section("data_download")
# Click generate grade report button
world.css_click('input[name="calculate-grades-csv"]')
# Expect to see a message that grade report is being generated
expected_msg = "The grade report is being created." \
" To view the status of the report, see" \
" Pending Tasks below."
world.wait_for_visible('#report-request-response')
assert_in(
expected_msg, world.css_text('#report-request-response'),
msg="Could not find grade report generation success message."
)
elif button == "Grading Configuration":
# Go to the data download section of the instructor dash
go_to_section("data_download")
world.css_click('input[name="dump-gradeconf"]')
elif button == "List enrolled students' profile information":
# Go to the data download section of the instructor dash
go_to_section("data_download")
world.css_click('input[name="list-profiles"]')
elif button == "Download profile information as a CSV":
# Go to the data download section of the instructor dash
go_to_section("data_download")
world.css_click('input[name="list-profiles-csv"]')
else:
raise ValueError("Unrecognized button option " + button)
@step(u'I visit the "([^"]*)" tab')
def visit_a_tab(step, tab_name): # pylint: disable=unused-argument
# course_info, membership, student_admin, data_download, analytics, send_email
tab_name_dict = {
'Course Info': 'course_info',
'Membership': 'membership',
'Student Admin': 'student_admin',
'Data Download': 'data_download',
'Analytics': 'analytics',
'Email': 'send_email',
}
go_to_section(tab_name_dict[tab_name])
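# Illustrative only: a minimal lettuce scenario that the step definitions
# above would match (the role, tab and button strings are example values,
# not taken from a real .feature file):
#
#   Scenario: Instructor downloads a grade report
#       Given I am "instructor" for a course
#       When I visit the "Data Download" tab
#       And I click "Generate Grade Report"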
| go_to_section | identifier_name |
swap_network_trotter_hubbard.py | See the License for the specific language governing permissions and
# limitations under the License.
"""A variational ansatz based on a linear swap network Trotter step."""
from typing import Iterable, Optional, Sequence, Tuple, cast
import numpy
import sympy
import cirq
from openfermioncirq import swap_network
from openfermioncirq.variational.ansatz import VariationalAnsatz
from openfermioncirq.variational.letter_with_subscripts import (
LetterWithSubscripts)
class SwapNetworkTrotterHubbardAnsatz(VariationalAnsatz):
"""A Hubbard model ansatz based on the fermionic swap network Trotter step.
Each Trotter step includes 3 parameters: one for the horizontal hopping
terms, one for the vertical hopping terms, and one for the on-site
interaction. This ansatz is similar to the one used in arXiv:1507.08969,
but corresponds to a different ordering for simulating the Hamiltonian
terms.
"""
def __init__(self,
x_dim: float,
y_dim: float,
tunneling: float,
coulomb: float,
periodic: bool=True,
iterations: int=1,
adiabatic_evolution_time: Optional[float]=None,
qubits: Optional[Sequence[cirq.Qid]]=None
) -> None:
"""
Args:
iterations: The number of iterations of the basic template to
include in the circuit. The number of parameters grows linearly
with this value.
adiabatic_evolution_time: The time scale for Hamiltonian evolution
used to determine the default initial parameters of the ansatz.
This is the value A from the docstring of
`default_initial_params`. If not specified, defaults to
0.1 * abs(coulomb) * iterations, matching the code below.
qubits: Qubits to be used by the ansatz circuit. If not specified,
then qubits will automatically be generated by the
`_generate_qubits` method.
"""
self.x_dim = x_dim
self.y_dim = y_dim
self.tunneling = tunneling
self.coulomb = coulomb
self.periodic = periodic
self.iterations = iterations
if adiabatic_evolution_time is None:
adiabatic_evolution_time = 0.1*abs(coulomb)*iterations
self.adiabatic_evolution_time = cast(float, adiabatic_evolution_time)
super().__init__(qubits)
def params(self) -> Iterable[sympy.Symbol]:
"""The parameters of the ansatz."""
for i in range(self.iterations):
if self.x_dim > 1:
yield LetterWithSubscripts('Th', i)
if self.y_dim > 1:
yield LetterWithSubscripts('Tv', i)
yield LetterWithSubscripts('V', i)
def param_bounds(self) -> Optional[Sequence[Tuple[float, float]]]:
"""Bounds on the parameters."""
bounds = []
for param in self.params():
s = 1.0 if param.letter == 'V' else 2.0
bounds.append((-s, s))
return bounds
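# Example (follows directly from params() and param_bounds() above): for a
# 2x2 lattice with iterations=1, params() yields Th_0, Tv_0 and V_0, and
# param_bounds() returns [(-2.0, 2.0), (-2.0, 2.0), (-1.0, 1.0)].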
def _generate_qubits(self) -> Sequence[cirq.Qid]:
"""Produce qubits that can be used by the ansatz circuit."""
n_qubits = 2*self.x_dim*self.y_dim
return cirq.LineQubit.range(n_qubits)
def operations(self, qubits: Sequence[cirq.Qid]) -> cirq.OP_TREE:
"""Produce the operations of the ansatz circuit."""
for i in range(self.iterations):
# Apply one- and two-body interactions with a swap network that
# reverses the order of the modes
def one_and_two_body_interaction(p, q, a, b) -> cirq.OP_TREE:
th_symbol = LetterWithSubscripts('Th', i)
tv_symbol = LetterWithSubscripts('Tv', i)
v_symbol = LetterWithSubscripts('V', i)
if _is_horizontal_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-th_symbol).on(a, b)
if _is_vertical_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-tv_symbol).on(a, b)
if _are_same_site_opposite_spin(p, q, self.x_dim*self.y_dim):
yield cirq.CZPowGate(exponent=v_symbol).on(a, b)
yield swap_network(
qubits, one_and_two_body_interaction, fermionic=True)
qubits = qubits[::-1]
# Apply one- and two-body interactions again. This time, reorder
# them so that the entire iteration is symmetric
def one_and_two_body_interaction_reversed_order(p, q, a, b
) -> cirq.OP_TREE:
th_symbol = LetterWithSubscripts('Th', i)
tv_symbol = LetterWithSubscripts('Tv', i)
v_symbol = LetterWithSubscripts('V', i)
if _are_same_site_opposite_spin(p, q, self.x_dim*self.y_dim):
yield cirq.CZPowGate(exponent=v_symbol).on(a, b)
if _is_vertical_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-tv_symbol).on(a, b)
if _is_horizontal_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-th_symbol).on(a, b)
yield swap_network(
qubits, one_and_two_body_interaction_reversed_order,
fermionic=True, offset=True)
qubits = qubits[::-1]
def default_initial_params(self) -> numpy.ndarray:
"""Approximate evolution by H(t) = T + (t/A)V.
Sets the parameters so that the ansatz circuit consists of a sequence
of second-order Trotter steps approximating the dynamics of the
time-dependent Hamiltonian H(t) = T + (t/A)V, where T is the one-body
term and V is the two-body term of the Hamiltonian used to generate the
ansatz circuit, and t ranges from 0 to A, where A is equal to
`self.adiabatic_evolution_time`. The number of Trotter steps
is equal to the number of iterations in the ansatz. This choice is
motivated by the idea of state preparation via adiabatic evolution.
The dynamics of H(t) are approximated as follows. First, the total
evolution time of A is split into segments of length A / r, where r
is the number of Trotter steps. Then, each Trotter step simulates H(t)
for a time length of A / r, where t is the midpoint of the
corresponding time segment. As an example, suppose A is 100 and the
ansatz has two iterations. Then the approximation is achieved with two
Trotter steps. The first Trotter step simulates H(25) for a time length
of 50, and the second Trotter step simulates H(75) for a time length
of 50.
"""
total_time = self.adiabatic_evolution_time
step_time = total_time / self.iterations
params = []
for param, scale_factor in zip(self.params(),
self.param_scale_factors()):
if param.letter == 'Th' or param.letter == 'Tv':
params.append(_canonicalize_exponent(
-self.tunneling * step_time / numpy.pi, 4) / scale_factor)
elif param.letter == 'V':
i, = param.subscripts
# Use the midpoint of the time segment
interpolation_progress = 0.5 * (2 * i + 1) / self.iterations
params.append(_canonicalize_exponent(
-0.5 * self.coulomb * interpolation_progress *
step_time / numpy.pi, 2) / scale_factor)
return numpy.array(params)
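# Worked instance of the docstring example above (comments only): with
# adiabatic_evolution_time A = 100 and iterations r = 2, step_time = 50 and
#   i = 0: interpolation_progress = 0.5 * (2*0 + 1) / 2 = 0.25  -> t = 25
#   i = 1: interpolation_progress = 0.5 * (2*1 + 1) / 2 = 0.75  -> t = 75
# so each Trotter step simulates H(t) at its segment midpoint for a length
# of A / r = 50.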
def | (p, q, x_dim, y_dim, periodic):
n_sites = x_dim*y_dim
if p < n_sites and q >= n_sites or q < n_sites and p >= n_sites:
return False
if p >= n_sites and q >= n_sites:
p -= n_sites
q -= n_sites
return (q == _right_neighbor(p, x_dim, y_dim, periodic)
or p == _right_neighbor(q, x_dim, y_dim, periodic))
def _is_vertical_edge(p, q, x_dim, y_dim, periodic):
n_sites = x_dim*y_dim
if p < n_sites and q >= n_sites or q < n_sites and p >= n_sites:
return False
if p >= n_sites and q >= n_sites:
p -= n_sites
q -= n_sites
return (q == _bottom_neighbor(p, x_dim, y_dim, periodic)
or p == _bottom_neighbor(q, x_dim, y_dim, periodic))
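# _right_neighbor and _bottom_neighbor are referenced above but truncated
# from this excerpt. A sketch consistent with row-major indexing of an
# x_dimension-by-y_dimension grid (an assumption; suffixed "_sketch" so it
# does not clash with the real, unshown definition):
def _bottom_neighbor_sketch(site, x_dimension, y_dimension, periodic):
    if y_dimension == 1:
        return None
    if site + x_dimension >= x_dimension * y_dimension:
        # Bottom row: wrap around only under periodic boundary conditions.
        if periodic:
            return site + x_dimension - x_dimension * y_dimension
        return None
    return site + x_dimension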
def _are_same_site_opposite_spin(p, q, n_sites | _is_horizontal_edge | identifier_name |
swap_network_trotter_hubbard.py | fermioncirq.variational.ansatz import VariationalAnsatz
from openfermioncirq.variational.letter_with_subscripts import (
LetterWithSubscripts)
class SwapNetworkTrotterHubbardAnsatz(VariationalAnsatz):
"""A Hubbard model ansatz based on the fermionic swap network Trotter step.
Each Trotter step includes 3 parameters: one for the horizontal hopping
terms, one for the vertical hopping terms, and one for the on-site
interaction. This ansatz is similar to the one used in arXiv:1507.08969,
but corresponds to a different ordering for simulating the Hamiltonian
terms.
"""
def __init__(self,
x_dim: float,
y_dim: float,
tunneling: float,
coulomb: float,
periodic: bool=True,
iterations: int=1,
adiabatic_evolution_time: Optional[float]=None,
qubits: Optional[Sequence[cirq.Qid]]=None
) -> None:
"""
Args:
iterations: The number of iterations of the basic template to
include in the circuit. The number of parameters grows linearly
with this value.
adiabatic_evolution_time: The time scale for Hamiltonian evolution
used to determine the default initial parameters of the ansatz.
This is the value A from the docstring of
`default_initial_params`. If not specified, defaults to
0.1 * abs(coulomb) * iterations, matching the code below.
qubits: Qubits to be used by the ansatz circuit. If not specified,
then qubits will automatically be generated by the
`_generate_qubits` method.
"""
self.x_dim = x_dim
self.y_dim = y_dim
self.tunneling = tunneling
self.coulomb = coulomb
self.periodic = periodic
self.iterations = iterations
if adiabatic_evolution_time is None:
adiabatic_evolution_time = 0.1*abs(coulomb)*iterations
self.adiabatic_evolution_time = cast(float, adiabatic_evolution_time)
super().__init__(qubits)
def params(self) -> Iterable[sympy.Symbol]:
"""The parameters of the ansatz."""
for i in range(self.iterations):
if self.x_dim > 1:
yield LetterWithSubscripts('Th', i)
if self.y_dim > 1:
yield LetterWithSubscripts('Tv', i)
yield LetterWithSubscripts('V', i)
def param_bounds(self) -> Optional[Sequence[Tuple[float, float]]]:
"""Bounds on the parameters."""
bounds = []
for param in self.params():
s = 1.0 if param.letter == 'V' else 2.0
bounds.append((-s, s))
return bounds
def _generate_qubits(self) -> Sequence[cirq.Qid]:
"""Produce qubits that can be used by the ansatz circuit."""
n_qubits = 2*self.x_dim*self.y_dim
return cirq.LineQubit.range(n_qubits)
def operations(self, qubits: Sequence[cirq.Qid]) -> cirq.OP_TREE:
"""Produce the operations of the ansatz circuit."""
for i in range(self.iterations):
# Apply one- and two-body interactions with a swap network that
# reverses the order of the modes
def one_and_two_body_interaction(p, q, a, b) -> cirq.OP_TREE:
th_symbol = LetterWithSubscripts('Th', i)
tv_symbol = LetterWithSubscripts('Tv', i)
v_symbol = LetterWithSubscripts('V', i)
if _is_horizontal_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-th_symbol).on(a, b)
if _is_vertical_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-tv_symbol).on(a, b)
if _are_same_site_opposite_spin(p, q, self.x_dim*self.y_dim):
yield cirq.CZPowGate(exponent=v_symbol).on(a, b)
yield swap_network(
qubits, one_and_two_body_interaction, fermionic=True)
qubits = qubits[::-1]
# Apply one- and two-body interactions again. This time, reorder
# them so that the entire iteration is symmetric
def one_and_two_body_interaction_reversed_order(p, q, a, b
) -> cirq.OP_TREE:
th_symbol = LetterWithSubscripts('Th', i)
tv_symbol = LetterWithSubscripts('Tv', i)
v_symbol = LetterWithSubscripts('V', i)
if _are_same_site_opposite_spin(p, q, self.x_dim*self.y_dim):
yield cirq.CZPowGate(exponent=v_symbol).on(a, b)
if _is_vertical_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-tv_symbol).on(a, b)
if _is_horizontal_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-th_symbol).on(a, b)
yield swap_network(
qubits, one_and_two_body_interaction_reversed_order,
fermionic=True, offset=True)
qubits = qubits[::-1]
def default_initial_params(self) -> numpy.ndarray:
"""Approximate evolution by H(t) = T + (t/A)V.
Sets the parameters so that the ansatz circuit consists of a sequence
of second-order Trotter steps approximating the dynamics of the
time-dependent Hamiltonian H(t) = T + (t/A)V, where T is the one-body
term and V is the two-body term of the Hamiltonian used to generate the
ansatz circuit, and t ranges from 0 to A, where A is equal to
`self.adiabatic_evolution_time`. The number of Trotter steps
is equal to the number of iterations in the ansatz. This choice is
motivated by the idea of state preparation via adiabatic evolution.
The dynamics of H(t) are approximated as follows. First, the total
evolution time of A is split into segments of length A / r, where r
is the number of Trotter steps. Then, each Trotter step simulates H(t)
for a time length of A / r, where t is the midpoint of the
corresponding time segment. As an example, suppose A is 100 and the
ansatz has two iterations. Then the approximation is achieved with two
Trotter steps. The first Trotter step simulates H(25) for a time length
of 50, and the second Trotter step simulates H(75) for a time length
of 50.
"""
total_time = self.adiabatic_evolution_time
step_time = total_time / self.iterations
params = []
for param, scale_factor in zip(self.params(),
self.param_scale_factors()):
if param.letter == 'Th' or param.letter == 'Tv':
params.append(_canonicalize_exponent(
-self.tunneling * step_time / numpy.pi, 4) / scale_factor)
elif param.letter == 'V':
i, = param.subscripts
# Use the midpoint of the time segment
interpolation_progress = 0.5 * (2 * i + 1) / self.iterations
params.append(_canonicalize_exponent(
-0.5 * self.coulomb * interpolation_progress *
step_time / numpy.pi, 2) / scale_factor)
return numpy.array(params)
def _is_horizontal_edge(p, q, x_dim, y_dim, periodic):
n_sites = x_dim*y_dim
if p < n_sites and q >= n_sites or q < n_sites and p >= n_sites:
return False
if p >= n_sites and q >= n_sites:
p -= n_sites
q -= n_sites
return (q == _right_neighbor(p, x_dim, y_dim, periodic)
or p == _right_neighbor(q, x_dim, y_dim, periodic))
def _is_vertical_edge(p, q, x_dim, y_dim, periodic):
n_sites = x_dim*y_dim
if p < n_sites and q >= n_sites or q < n_sites and p >= n_sites:
return False
if p >= n_sites and q >= n_sites:
p -= n_sites
q -= n_sites
return (q == _bottom_neighbor(p, x_dim, y_dim, periodic)
or p == _bottom_neighbor(q, x_dim, y_dim, periodic))
def _are_same_site_opposite_spin(p, q, n_sites):
return abs(p-q) == n_sites
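# _canonicalize_exponent is called in default_initial_params above but is
# truncated from this excerpt. A sketch of the assumed behavior (fold an
# exponent into a single period centered on zero), suffixed "_sketch" to
# avoid clashing with the real definition:
def _canonicalize_exponent_sketch(exponent, period):
    # Shift into the window [-period/2, period/2).
    exponent += period / 2
    exponent %= period
    exponent -= period / 2
    return exponent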
def _right_neighbor(site, x_dimension, y_dimension, periodic):
if x_dimension == 1:
return None
if (site + 1) % x_dimension == 0:
| if periodic:
return site + 1 - x_dimension
else:
return None | conditional_block |
|
swap_network_trotter_hubbard.py | # See the License for the specific language governing permissions and
# limitations under the License.
"""A variational ansatz based on a linear swap network Trotter step."""
from typing import Iterable, Optional, Sequence, Tuple, cast
import numpy
import sympy
import cirq
from openfermioncirq import swap_network
from openfermioncirq.variational.ansatz import VariationalAnsatz
from openfermioncirq.variational.letter_with_subscripts import (
LetterWithSubscripts)
class SwapNetworkTrotterHubbardAnsatz(VariationalAnsatz):
"""A Hubbard model ansatz based on the fermionic swap network Trotter step.
Each Trotter step includes 3 parameters: one for the horizontal hopping
terms, one for the vertical hopping terms, and one for the on-site
interaction. This ansatz is similar to the one used in arXiv:1507.08969,
but corresponds to a different ordering for simulating the Hamiltonian
terms.
"""
def __init__(self,
x_dim: float,
y_dim: float,
tunneling: float,
coulomb: float,
periodic: bool=True,
iterations: int=1,
adiabatic_evolution_time: Optional[float]=None,
qubits: Optional[Sequence[cirq.Qid]]=None
) -> None:
"""
Args:
iterations: The number of iterations of the basic template to
include in the circuit. The number of parameters grows linearly
with this value.
adiabatic_evolution_time: The time scale for Hamiltonian evolution
used to determine the default initial parameters of the ansatz.
This is the value A from the docstring of
`default_initial_params`. If not specified, defaults to
0.1 * abs(coulomb) * iterations, matching the code below.
qubits: Qubits to be used by the ansatz circuit. If not specified,
then qubits will automatically be generated by the
`_generate_qubits` method.
"""
self.x_dim = x_dim
self.y_dim = y_dim
self.tunneling = tunneling
self.coulomb = coulomb
self.periodic = periodic
self.iterations = iterations
if adiabatic_evolution_time is None:
adiabatic_evolution_time = 0.1*abs(coulomb)*iterations
self.adiabatic_evolution_time = cast(float, adiabatic_evolution_time)
super().__init__(qubits)
def params(self) -> Iterable[sympy.Symbol]:
"""The parameters of the ansatz."""
for i in range(self.iterations):
if self.x_dim > 1:
yield LetterWithSubscripts('Th', i)
if self.y_dim > 1:
yield LetterWithSubscripts('Tv', i)
yield LetterWithSubscripts('V', i)
def param_bounds(self) -> Optional[Sequence[Tuple[float, float]]]:
"""Bounds on the parameters."""
bounds = []
for param in self.params():
s = 1.0 if param.letter == 'V' else 2.0
bounds.append((-s, s))
return bounds
def _generate_qubits(self) -> Sequence[cirq.Qid]:
"""Produce qubits that can be used by the ansatz circuit."""
n_qubits = 2*self.x_dim*self.y_dim
return cirq.LineQubit.range(n_qubits)
def operations(self, qubits: Sequence[cirq.Qid]) -> cirq.OP_TREE:
"""Produce the operations of the ansatz circuit."""
for i in range(self.iterations):
# Apply one- and two-body interactions with a swap network that
# reverses the order of the modes
def one_and_two_body_interaction(p, q, a, b) -> cirq.OP_TREE:
th_symbol = LetterWithSubscripts('Th', i)
tv_symbol = LetterWithSubscripts('Tv', i)
v_symbol = LetterWithSubscripts('V', i)
if _is_horizontal_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-th_symbol).on(a, b)
if _is_vertical_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-tv_symbol).on(a, b)
if _are_same_site_opposite_spin(p, q, self.x_dim*self.y_dim):
yield cirq.CZPowGate(exponent=v_symbol).on(a, b)
yield swap_network(
qubits, one_and_two_body_interaction, fermionic=True)
qubits = qubits[::-1]
# Apply one- and two-body interactions again. This time, reorder
# them so that the entire iteration is symmetric
def one_and_two_body_interaction_reversed_order(p, q, a, b
) -> cirq.OP_TREE:
th_symbol = LetterWithSubscripts('Th', i)
tv_symbol = LetterWithSubscripts('Tv', i)
v_symbol = LetterWithSubscripts('V', i)
if _are_same_site_opposite_spin(p, q, self.x_dim*self.y_dim):
yield cirq.CZPowGate(exponent=v_symbol).on(a, b)
if _is_vertical_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-tv_symbol).on(a, b)
if _is_horizontal_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-th_symbol).on(a, b) | fermionic=True, offset=True)
qubits = qubits[::-1]
def default_initial_params(self) -> numpy.ndarray:
"""Approximate evolution by H(t) = T + (t/A)V.
Sets the parameters so that the ansatz circuit consists of a sequence
of second-order Trotter steps approximating the dynamics of the
time-dependent Hamiltonian H(t) = T + (t/A)V, where T is the one-body
term and V is the two-body term of the Hamiltonian used to generate the
ansatz circuit, and t ranges from 0 to A, where A is equal to
`self.adiabatic_evolution_time`. The number of Trotter steps
is equal to the number of iterations in the ansatz. This choice is
motivated by the idea of state preparation via adiabatic evolution.
The dynamics of H(t) are approximated as follows. First, the total
evolution time of A is split into segments of length A / r, where r
is the number of Trotter steps. Then, each Trotter step simulates H(t)
for a time length of A / r, where t is the midpoint of the
corresponding time segment. As an example, suppose A is 100 and the
ansatz has two iterations. Then the approximation is achieved with two
Trotter steps. The first Trotter step simulates H(25) for a time length
of 50, and the second Trotter step simulates H(75) for a time length
of 50.
"""
total_time = self.adiabatic_evolution_time
step_time = total_time / self.iterations
params = []
for param, scale_factor in zip(self.params(),
self.param_scale_factors()):
if param.letter == 'Th' or param.letter == 'Tv':
params.append(_canonicalize_exponent(
-self.tunneling * step_time / numpy.pi, 4) / scale_factor)
elif param.letter == 'V':
i, = param.subscripts
# Use the midpoint of the time segment
interpolation_progress = 0.5 * (2 * i + 1) / self.iterations
params.append(_canonicalize_exponent(
-0.5 * self.coulomb * interpolation_progress *
step_time / numpy.pi, 2) / scale_factor)
return numpy.array(params)
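# Quick sanity sketch (hypothetical check, not part of this file): the
# defaults line up one-to-one with params(); e.g. for a 2x2 lattice,
#
#   ansatz = SwapNetworkTrotterHubbardAnsatz(2, 2, 1.0, 4.0, iterations=2)
#   assert len(ansatz.default_initial_params()) == len(list(ansatz.params()))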
def _is_horizontal_edge(p, q, x_dim, y_dim, periodic):
n_sites = x_dim*y_dim
if p < n_sites and q >= n_sites or q < n_sites and p >= n_sites:
return False
if p >= n_sites and q >= n_sites:
p -= n_sites
q -= n_sites
return (q == _right_neighbor(p, x_dim, y_dim, periodic)
or p == _right_neighbor(q, x_dim, y_dim, periodic))
def _is_vertical_edge(p, q, x_dim, y_dim, periodic):
n_sites = x_dim*y_dim
if p < n_sites and q >= n_sites or q < n_sites and p >= n_sites:
return False
if p >= n_sites and q >= n_sites:
p -= n_sites
q -= n_sites
return (q == _bottom_neighbor(p, x_dim, y_dim, periodic)
or p == _bottom_neighbor(q, x_dim, y_dim, periodic))
def _are_same_site_opposite_spin(p, q, n | yield swap_network(
qubits, one_and_two_body_interaction_reversed_order, | random_line_split |
swap_network_trotter_hubbard.py | See the License for the specific language governing permissions and
# limitations under the License.
"""A variational ansatz based on a linear swap network Trotter step."""
from typing import Iterable, Optional, Sequence, Tuple, cast
import numpy
import sympy
import cirq
from openfermioncirq import swap_network
from openfermioncirq.variational.ansatz import VariationalAnsatz
from openfermioncirq.variational.letter_with_subscripts import (
LetterWithSubscripts)
class SwapNetworkTrotterHubbardAnsatz(VariationalAnsatz):
"""A Hubbard model ansatz based on the fermionic swap network Trotter step.
Each Trotter step includes 3 parameters: one for the horizontal hopping
terms, one for the vertical hopping terms, and one for the on-site
interaction. This ansatz is similar to the one used in arXiv:1507.08969,
but corresponds to a different ordering for simulating the Hamiltonian
terms.
"""
def __init__(self,
x_dim: float,
y_dim: float,
tunneling: float,
coulomb: float,
periodic: bool=True,
iterations: int=1,
adiabatic_evolution_time: Optional[float]=None,
qubits: Optional[Sequence[cirq.Qid]]=None
) -> None:
"""
Args:
iterations: The number of iterations of the basic template to
include in the circuit. The number of parameters grows linearly
with this value.
adiabatic_evolution_time: The time scale for Hamiltonian evolution
used to determine the default initial parameters of the ansatz.
This is the value A from the docstring of
`default_initial_params`. If not specified, defaults to
0.1 * abs(coulomb) * iterations, matching the code below.
qubits: Qubits to be used by the ansatz circuit. If not specified,
then qubits will automatically be generated by the
`_generate_qubits` method.
"""
self.x_dim = x_dim
self.y_dim = y_dim
self.tunneling = tunneling
self.coulomb = coulomb
self.periodic = periodic
self.iterations = iterations
if adiabatic_evolution_time is None:
adiabatic_evolution_time = 0.1*abs(coulomb)*iterations
self.adiabatic_evolution_time = cast(float, adiabatic_evolution_time)
super().__init__(qubits)
def params(self) -> Iterable[sympy.Symbol]:
"""The parameters of the ansatz."""
for i in range(self.iterations):
if self.x_dim > 1:
yield LetterWithSubscripts('Th', i)
if self.y_dim > 1:
yield LetterWithSubscripts('Tv', i)
yield LetterWithSubscripts('V', i)
def param_bounds(self) -> Optional[Sequence[Tuple[float, float]]]:
"""Bounds on the parameters."""
bounds = []
for param in self.params():
s = 1.0 if param.letter == 'V' else 2.0
bounds.append((-s, s))
return bounds
def _generate_qubits(self) -> Sequence[cirq.Qid]:
|
def operations(self, qubits: Sequence[cirq.Qid]) -> cirq.OP_TREE:
"""Produce the operations of the ansatz circuit."""
for i in range(self.iterations):
# Apply one- and two-body interactions with a swap network that
# reverses the order of the modes
def one_and_two_body_interaction(p, q, a, b) -> cirq.OP_TREE:
th_symbol = LetterWithSubscripts('Th', i)
tv_symbol = LetterWithSubscripts('Tv', i)
v_symbol = LetterWithSubscripts('V', i)
if _is_horizontal_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-th_symbol).on(a, b)
if _is_vertical_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-tv_symbol).on(a, b)
if _are_same_site_opposite_spin(p, q, self.x_dim*self.y_dim):
yield cirq.CZPowGate(exponent=v_symbol).on(a, b)
yield swap_network(
qubits, one_and_two_body_interaction, fermionic=True)
qubits = qubits[::-1]
# Apply one- and two-body interactions again. This time, reorder
# them so that the entire iteration is symmetric
def one_and_two_body_interaction_reversed_order(p, q, a, b
) -> cirq.OP_TREE:
th_symbol = LetterWithSubscripts('Th', i)
tv_symbol = LetterWithSubscripts('Tv', i)
v_symbol = LetterWithSubscripts('V', i)
if _are_same_site_opposite_spin(p, q, self.x_dim*self.y_dim):
yield cirq.CZPowGate(exponent=v_symbol).on(a, b)
if _is_vertical_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-tv_symbol).on(a, b)
if _is_horizontal_edge(
p, q, self.x_dim, self.y_dim, self.periodic):
yield cirq.ISwapPowGate(exponent=-th_symbol).on(a, b)
yield swap_network(
qubits, one_and_two_body_interaction_reversed_order,
fermionic=True, offset=True)
qubits = qubits[::-1]
def default_initial_params(self) -> numpy.ndarray:
"""Approximate evolution by H(t) = T + (t/A)V.
Sets the parameters so that the ansatz circuit consists of a sequence
of second-order Trotter steps approximating the dynamics of the
time-dependent Hamiltonian H(t) = T + (t/A)V, where T is the one-body
term and V is the two-body term of the Hamiltonian used to generate the
ansatz circuit, and t ranges from 0 to A, where A is equal to
`self.adiabatic_evolution_time`. The number of Trotter steps
is equal to the number of iterations in the ansatz. This choice is
motivated by the idea of state preparation via adiabatic evolution.
The dynamics of H(t) are approximated as follows. First, the total
evolution time of A is split into segments of length A / r, where r
is the number of Trotter steps. Then, each Trotter step simulates H(t)
for a time length of A / r, where t is the midpoint of the
corresponding time segment. As an example, suppose A is 100 and the
ansatz has two iterations. Then the approximation is achieved with two
Trotter steps. The first Trotter step simulates H(25) for a time length
of 50, and the second Trotter step simulates H(75) for a time length
of 50.
"""
total_time = self.adiabatic_evolution_time
step_time = total_time / self.iterations
params = []
for param, scale_factor in zip(self.params(),
self.param_scale_factors()):
if param.letter == 'Th' or param.letter == 'Tv':
params.append(_canonicalize_exponent(
-self.tunneling * step_time / numpy.pi, 4) / scale_factor)
elif param.letter == 'V':
i, = param.subscripts
# Use the midpoint of the time segment
interpolation_progress = 0.5 * (2 * i + 1) / self.iterations
params.append(_canonicalize_exponent(
-0.5 * self.coulomb * interpolation_progress *
step_time / numpy.pi, 2) / scale_factor)
return numpy.array(params)
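# Hypothetical usage sketch (names and wiring assumed, not shown in this
# file): bind the ansatz symbols to their default values with cirq before
# simulating.
#
#   ansatz = SwapNetworkTrotterHubbardAnsatz(2, 2, 1.0, 4.0)
#   resolver = cirq.ParamResolver(
#       {str(s): v for s, v in zip(ansatz.params(),
#                                  ansatz.default_initial_params())})
#   circuit = cirq.Circuit(ansatz.operations(ansatz.qubits))
#   resolved = cirq.resolve_parameters(circuit, resolver)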
def _is_horizontal_edge(p, q, x_dim, y_dim, periodic):
n_sites = x_dim*y_dim
if p < n_sites and q >= n_sites or q < n_sites and p >= n_sites:
return False
if p >= n_sites and q >= n_sites:
p -= n_sites
q -= n_sites
return (q == _right_neighbor(p, x_dim, y_dim, periodic)
or p == _right_neighbor(q, x_dim, y_dim, periodic))
def _is_vertical_edge(p, q, x_dim, y_dim, periodic):
n_sites = x_dim*y_dim
if p < n_sites and q >= n_sites or q < n_sites and p >= n_sites:
return False
if p >= n_sites and q >= n_sites:
p -= n_sites
q -= n_sites
return (q == _bottom_neighbor(p, x_dim, y_dim, periodic)
or p == _bottom_neighbor(q, x_dim, y_dim, periodic))
def _are_same_site_opposite_spin(p, q, n | """Produce qubits that can be used by the ansatz circuit."""
n_qubits = 2*self.x_dim*self.y_dim
return cirq.LineQubit.range(n_qubits) | identifier_body |
f23.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[allow(unreachable_code)]
pub fn | () {
let mut x = 23;
let mut y = 23;
let mut z = 23;
while x > 0 {
x -= 1;
while y > 0 {
y -= 1;
while z > 0 { z -= 1; }
if x > 10 {
return;
"unreachable";
}
}
}
}
| expr_while_23 | identifier_name |
f23.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[allow(unreachable_code)]
pub fn expr_while_23() {
let mut x = 23;
let mut y = 23;
let mut z = 23;
while x > 0 {
x -= 1;
while y > 0 {
y -= 1;
while z > 0 { z -= 1; }
if x > 10 |
}
}
}
| {
return;
"unreachable";
} | conditional_block |
f23.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your | #[allow(unreachable_code)]
pub fn expr_while_23() {
let mut x = 23;
let mut y = 23;
let mut z = 23;
while x > 0 {
x -= 1;
while y > 0 {
y -= 1;
while z > 0 { z -= 1; }
if x > 10 {
return;
"unreachable";
}
}
}
} | // option. This file may not be copied, modified, or distributed
// except according to those terms.
| random_line_split |
f23.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[allow(unreachable_code)]
pub fn expr_while_23() | {
let mut x = 23;
let mut y = 23;
let mut z = 23;
while x > 0 {
x -= 1;
while y > 0 {
y -= 1;
while z > 0 { z -= 1; }
if x > 10 {
return;
"unreachable";
}
}
}
} | identifier_body |
|
download.py | # This file is part of the "upq" program used on springfiles.com to manage file
# uploads, mirror distribution etc. It is published under the GPLv3.
#
#Copyright (C) 2011 Daniel Troeder (daniel #at# admin-box #dot# com)
#
#You should have received a copy of the GNU General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
# downloads a file
from upqjob import UpqJob
from upqdb import UpqDB
from time import time
import os
import shutil
import requests
class Download(UpqJob):
| """
"download url:$url"
"""
def run(self):
url=self.jobdata['url']
filename=os.path.basename(url)
tmpfile=os.path.join(self.getcfg('temppath', '/tmp'), filename)
self.jobdata['file']=tmpfile
self.logger.debug("going to download %s", url)
try:
response = requests.get(url, stream=True, verify=False)
with open(tmpfile, 'wb') as out_file:
shutil.copyfileobj(response.raw, out_file)
del response
self.logger.debug("downloaded to %s", tmpfile)
except Exception as e:
self.logger.error(str(e))
return False
return True | identifier_body |
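# Hypothetical usage sketch (the UpqJob constructor and queue wiring live
# outside this file and are assumed here):
#
#   job = Download("download", {"url": "http://example.com/file.sd7"})
#   if job.run():
#       print("downloaded to", job.jobdata["file"])
#
# Note: verify=False above skips TLS certificate verification; only safe
# for trusted mirrors.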
|
download.py | # This file is part of the "upq" program used on springfiles.com to manage file
# uploads, mirror distribution etc. It is published under the GPLv3.
#
#Copyright (C) 2011 Daniel Troeder (daniel #at# admin-box #dot# com)
#
#You should have received a copy of the GNU General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
# downloads a file
from upqjob import UpqJob
from upqdb import UpqDB
from time import time
import os
import shutil
import requests
class Download(UpqJob):
"""
"download url:$url"
"""
def run(self): | self.logger.debug("going to download %s", url)
try:
response = requests.get(url, stream=True, verify=False)
with open(tmpfile, 'wb') as out_file:
shutil.copyfileobj(response.raw, out_file)
del response
self.logger.debug("downloaded to %s", tmpfile)
except Exception as e:
self.logger.error(str(e))
return False
return True | url=self.jobdata['url']
filename=os.path.basename(url)
tmpfile=os.path.join(self.getcfg('temppath', '/tmp'), filename)
self.jobdata['file']=tmpfile | random_line_split |
download.py | # This file is part of the "upq" program used on springfiles.com to manage file
# uploads, mirror distribution etc. It is published under the GPLv3.
#
#Copyright (C) 2011 Daniel Troeder (daniel #at# admin-box #dot# com)
#
#You should have received a copy of the GNU General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
# downloads a file
from upqjob import UpqJob
from upqdb import UpqDB
from time import time
import os
import shutil
import requests
class Download(UpqJob):
"""
"download url:$url"
"""
def | (self):
url=self.jobdata['url']
filename=os.path.basename(url)
tmpfile=os.path.join(self.getcfg('temppath', '/tmp'), filename)
self.jobdata['file']=tmpfile
self.logger.debug("going to download %s", url)
try:
response = requests.get(url, stream=True, verify=False)
with open(tmpfile, 'wb') as out_file:
shutil.copyfileobj(response.raw, out_file)
del response
self.logger.debug("downloaded to %s", tmpfile)
except Exception as e:
self.logger.error(str(e))
return False
return True
| run | identifier_name |
distributions.py | identity = {
# https://www.census.gov/prod/cen2010/briefs/c2010br-03.pdf
'sex': [('M',49.2),('F',50.8)],
# https://en.wikipedia.org/wiki/Race_and_ethnicity_in_the_United_States
'race': [('O',72.4),('U',12.6)]
}
iq = {
# Class: (mu, sigma)
# http://www.iqcomparisonsite.com/sexdifferences.aspx
'M': (103.08, 14.54),
'F': (101.41, 13.55),
| # http://isteve.blogspot.com/2005/12/do-black-women-have-higher-iqs-than.html
# See the URL above for the provenance of the figures. As heritable measures of IQ,
# they are probably mostly garbage. But they provide a representative basis for a
# certain kind of "scientific" view of the world. And they were the only ones
# I came across that broke down mu and sigma values by sex and race.
'UF': (90.8, 13.58),
'UM': (88.4, 13.30),
'OF': (103.6, 13.30),
'OM': (102.7, 14.75)
} | # https://commons.wikimedia.org/wiki/File:WAIS-IV_FSIQ_Scores_by_Race_and_Ethnicity.png
'O': (103.21, 13.77),
'U': (88.67, 13.68),
| random_line_split |